var/home/core/zuul-output/logs/kubelet.log
Dec 09 14:58:21 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 09 14:58:21 crc restorecon[4561]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 
crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 
14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 
14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 
14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 
14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc 
restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 09 14:58:21 crc restorecon[4561]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 09 14:58:21 crc kubenswrapper[4583]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.959653 4583 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964434 4583 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964452 4583 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964459 4583 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964464 4583 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964469 4583 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964473 4583 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964477 4583 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964480 4583 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964483 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964486 4583 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964490 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964494 4583 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964497 4583 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964500 4583 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964503 4583 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964507 4583 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964510 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964513 4583 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964516 4583 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964519 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964522 
4583 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964526 4583 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964541 4583 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964544 4583 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964548 4583 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964552 4583 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964555 4583 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964558 4583 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964562 4583 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964565 4583 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964568 4583 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964571 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964574 4583 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964578 4583 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964581 4583 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964584 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964587 4583 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964590 4583 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964595 4583 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964600 4583 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964603 4583 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964607 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964610 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964613 4583 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964618 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964621 4583 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964626 4583 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964630 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964634 4583 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964638 4583 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964642 4583 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964645 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964648 4583 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964652 4583 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964655 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964658 4583 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964661 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964664 4583 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964667 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964670 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964674 4583 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964679 4583 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964683 4583 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964687 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964690 4583 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964694 4583 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964697 4583 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964700 4583 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964703 4583 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964706 4583 feature_gate.go:330] unrecognized feature gate: Example Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.964710 4583 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964799 4583 flags.go:64] FLAG: --address="0.0.0.0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964807 4583 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964813 4583 flags.go:64] FLAG: --anonymous-auth="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964818 4583 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964822 4583 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964826 4583 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964830 4583 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964835 4583 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964839 4583 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964842 4583 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964846 4583 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964852 4583 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964856 4583 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964860 4583 flags.go:64] FLAG: --cgroup-root="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964863 4583 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964867 4583 flags.go:64] FLAG: --client-ca-file="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964870 4583 flags.go:64] FLAG: --cloud-config="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964874 4583 flags.go:64] FLAG: --cloud-provider="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964878 4583 flags.go:64] FLAG: --cluster-dns="[]" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964883 4583 flags.go:64] FLAG: 
--cluster-domain="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964886 4583 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964890 4583 flags.go:64] FLAG: --config-dir="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964894 4583 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964898 4583 flags.go:64] FLAG: --container-log-max-files="5" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964902 4583 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964906 4583 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964909 4583 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964913 4583 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964917 4583 flags.go:64] FLAG: --contention-profiling="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964920 4583 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964924 4583 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964928 4583 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964932 4583 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964937 4583 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964940 4583 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964944 4583 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964947 4583 flags.go:64] FLAG: --enable-load-reader="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964951 4583 flags.go:64] FLAG: --enable-server="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964954 4583 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964959 4583 flags.go:64] FLAG: --event-burst="100" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964963 4583 flags.go:64] FLAG: --event-qps="50" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964966 4583 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964970 4583 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964975 4583 flags.go:64] FLAG: --eviction-hard="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964979 4583 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964983 4583 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.964986 4583 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965001 4583 flags.go:64] FLAG: --eviction-soft="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965005 4583 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965009 
4583 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965013 4583 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965017 4583 flags.go:64] FLAG: --experimental-mounter-path="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965021 4583 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965025 4583 flags.go:64] FLAG: --fail-swap-on="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965029 4583 flags.go:64] FLAG: --feature-gates="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965033 4583 flags.go:64] FLAG: --file-check-frequency="20s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965037 4583 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965040 4583 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965044 4583 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965048 4583 flags.go:64] FLAG: --healthz-port="10248" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965052 4583 flags.go:64] FLAG: --help="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965055 4583 flags.go:64] FLAG: --hostname-override="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965059 4583 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965063 4583 flags.go:64] FLAG: --http-check-frequency="20s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965067 4583 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965070 4583 flags.go:64] FLAG: --image-credential-provider-config="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965074 4583 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965077 4583 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965080 4583 flags.go:64] FLAG: --image-service-endpoint="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965084 4583 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965087 4583 flags.go:64] FLAG: --kube-api-burst="100" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965091 4583 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965095 4583 flags.go:64] FLAG: --kube-api-qps="50" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965098 4583 flags.go:64] FLAG: --kube-reserved="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965102 4583 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965106 4583 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965109 4583 flags.go:64] FLAG: --kubelet-cgroups="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965113 4583 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965116 4583 flags.go:64] FLAG: --lock-file="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965120 4583 flags.go:64] FLAG: 
--log-cadvisor-usage="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965124 4583 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965127 4583 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965132 4583 flags.go:64] FLAG: --log-json-split-stream="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965136 4583 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965141 4583 flags.go:64] FLAG: --log-text-split-stream="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965145 4583 flags.go:64] FLAG: --logging-format="text" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965148 4583 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965152 4583 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965156 4583 flags.go:64] FLAG: --manifest-url="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965159 4583 flags.go:64] FLAG: --manifest-url-header="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965167 4583 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965171 4583 flags.go:64] FLAG: --max-open-files="1000000" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965175 4583 flags.go:64] FLAG: --max-pods="110" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965179 4583 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965183 4583 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965187 4583 flags.go:64] FLAG: --memory-manager-policy="None" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965191 4583 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965194 4583 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965198 4583 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965202 4583 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965210 4583 flags.go:64] FLAG: --node-status-max-images="50" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965214 4583 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965218 4583 flags.go:64] FLAG: --oom-score-adj="-999" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965222 4583 flags.go:64] FLAG: --pod-cidr="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965226 4583 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965233 4583 flags.go:64] FLAG: --pod-manifest-path="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965236 4583 flags.go:64] FLAG: --pod-max-pids="-1" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965240 4583 flags.go:64] FLAG: --pods-per-core="0" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 
14:58:21.965244 4583 flags.go:64] FLAG: --port="10250" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965248 4583 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965252 4583 flags.go:64] FLAG: --provider-id="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965255 4583 flags.go:64] FLAG: --qos-reserved="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965259 4583 flags.go:64] FLAG: --read-only-port="10255" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965263 4583 flags.go:64] FLAG: --register-node="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965267 4583 flags.go:64] FLAG: --register-schedulable="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965270 4583 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965276 4583 flags.go:64] FLAG: --registry-burst="10" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965280 4583 flags.go:64] FLAG: --registry-qps="5" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965284 4583 flags.go:64] FLAG: --reserved-cpus="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965288 4583 flags.go:64] FLAG: --reserved-memory="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965292 4583 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965296 4583 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965300 4583 flags.go:64] FLAG: --rotate-certificates="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965304 4583 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965307 4583 flags.go:64] FLAG: --runonce="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965311 4583 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965315 4583 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965318 4583 flags.go:64] FLAG: --seccomp-default="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965324 4583 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965328 4583 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965331 4583 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965335 4583 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965339 4583 flags.go:64] FLAG: --storage-driver-password="root" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965343 4583 flags.go:64] FLAG: --storage-driver-secure="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965347 4583 flags.go:64] FLAG: --storage-driver-table="stats" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965350 4583 flags.go:64] FLAG: --storage-driver-user="root" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965354 4583 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965358 4583 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965362 4583 flags.go:64] FLAG: 
--system-cgroups="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965365 4583 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965371 4583 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965376 4583 flags.go:64] FLAG: --tls-cert-file="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965379 4583 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965384 4583 flags.go:64] FLAG: --tls-min-version="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965389 4583 flags.go:64] FLAG: --tls-private-key-file="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965393 4583 flags.go:64] FLAG: --topology-manager-policy="none" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965397 4583 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965401 4583 flags.go:64] FLAG: --topology-manager-scope="container" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965405 4583 flags.go:64] FLAG: --v="2" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965410 4583 flags.go:64] FLAG: --version="false" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965414 4583 flags.go:64] FLAG: --vmodule="" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965418 4583 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965423 4583 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965512 4583 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965517 4583 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965522 4583 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965526 4583 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965539 4583 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965544 4583 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965548 4583 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965553 4583 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965556 4583 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965559 4583 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965563 4583 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965566 4583 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965569 4583 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965572 4583 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965576 4583 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965579 4583 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965582 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965585 4583 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965588 4583 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965592 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965596 4583 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965600 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965604 4583 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965608 4583 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965611 4583 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965615 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965619 4583 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965622 4583 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965626 4583 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965629 4583 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965632 4583 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965635 4583 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965639 4583 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965642 4583 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965645 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965648 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965651 4583 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965655 4583 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965658 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965663 4583 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965666 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965669 4583 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965673 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965676 4583 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965679 4583 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965682 4583 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965685 
4583 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965689 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965692 4583 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965695 4583 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965698 4583 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965701 4583 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965704 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965707 4583 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965710 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965713 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965717 4583 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965720 4583 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965723 4583 feature_gate.go:330] unrecognized feature gate: Example Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965726 4583 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965730 4583 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965733 4583 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965736 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965739 4583 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965755 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965758 4583 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965763 4583 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965767 4583 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965771 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965775 4583 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.965779 4583 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.965790 4583 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.972738 4583 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.972769 4583 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972827 4583 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972833 4583 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972837 4583 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972840 4583 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972844 4583 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972848 4583 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972851 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972854 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972857 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972860 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972863 4583 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972868 4583 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972873 4583 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972876 4583 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972880 4583 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972883 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972888 4583 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972893 4583 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972897 4583 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972902 4583 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972906 4583 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972910 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972913 4583 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972916 4583 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972919 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972922 4583 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972926 4583 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972929 4583 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972932 4583 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972935 4583 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972939 4583 feature_gate.go:330] unrecognized feature gate: Example Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972942 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972945 4583 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972948 4583 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972956 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972960 4583 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972963 4583 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972966 4583 feature_gate.go:330] unrecognized feature 
gate: AutomatedEtcdBackup Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972969 4583 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972973 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972976 4583 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972979 4583 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972982 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972985 4583 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972988 4583 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972991 4583 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972995 4583 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.972998 4583 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973001 4583 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973004 4583 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973007 4583 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973011 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973014 4583 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973017 4583 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973020 4583 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973023 4583 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973027 4583 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973031 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973034 4583 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973037 4583 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973040 4583 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973044 4583 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973047 4583 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973050 4583 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973053 4583 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973056 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973059 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973062 4583 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973065 4583 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973069 4583 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973074 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.973079 4583 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973171 4583 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973176 4583 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973180 4583 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973185 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973189 4583 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973193 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973196 4583 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973199 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973203 4583 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973206 4583 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973209 4583 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973212 4583 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973216 4583 feature_gate.go:330] unrecognized feature gate: Example Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973220 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973223 4583 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973226 4583 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973229 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973232 4583 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973235 4583 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973239 4583 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973242 4583 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973245 4583 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973248 4583 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973252 4583 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973256 4583 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973260 4583 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973264 4583 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973268 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973271 4583 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973275 4583 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973278 4583 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973282 4583 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973285 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973289 4583 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973292 4583 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973296 4583 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973299 4583 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973302 4583 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973305 4583 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973308 4583 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973313 4583 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973317 4583 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973320 4583 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973324 4583 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973327 4583 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973330 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973333 4583 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973336 4583 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973340 4583 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973343 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973346 4583 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973349 4583 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973352 4583 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973355 4583 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973359 4583 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973362 4583 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973365 4583 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973369 4583 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973372 4583 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973375 4583 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973378 4583 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973381 4583 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973384 4583 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973387 4583 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973391 4583 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973394 4583 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973397 4583 feature_gate.go:330] unrecognized 
feature gate: RouteAdvertisements Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973400 4583 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973403 4583 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973406 4583 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 09 14:58:21 crc kubenswrapper[4583]: W1209 14:58:21.973410 4583 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.973415 4583 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.973500 4583 server.go:940] "Client rotation is on, will bootstrap in background" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.975874 4583 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.975928 4583 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.976784 4583 server.go:997] "Starting client certificate rotation" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.976814 4583 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.977037 4583 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-31 23:25:27.710157052 +0000 UTC Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.977113 4583 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 536h27m5.733046941s for next certificate rotation Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.988441 4583 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.989733 4583 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 09 14:58:21 crc kubenswrapper[4583]: I1209 14:58:21.998508 4583 log.go:25] "Validated CRI v1 runtime API" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.015085 4583 log.go:25] "Validated CRI v1 image API" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.016160 4583 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.019659 4583 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-09-14-55-04-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.019683 4583 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 
minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.032899 4583 manager.go:217] Machine: {Timestamp:2025-12-09 14:58:22.031411142 +0000 UTC m=+0.238860250 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445406 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a7c69146-0151-4a52-a15a-5504218d56ae BootID:30a4b563-0e9e-4808-9382-0ed5dfd7a2e7 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:b8:6a:07 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:b8:6a:07 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:13:80:e6 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:31:b9:87 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:c2:70:21 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:2d:45:81 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:66:09:cc:57:c3:85 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8e:87:e9:4c:e8:39 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033051 4583 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033131 4583 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033381 4583 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033522 4583 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033545 4583 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033700 4583 topology_manager.go:138] "Creating topology manager with none policy"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.033708 4583 container_manager_linux.go:303] "Creating device plugin manager"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.034010 4583 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.034036 4583 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.034346 4583 state_mem.go:36] "Initialized new in-memory state store"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.034412 4583 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.035946 4583 kubelet.go:418] "Attempting to sync node with API server"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.035964 4583 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.036007 4583 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.036019 4583 kubelet.go:324] "Adding apiserver pod source"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.036031 4583 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.038017 4583 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.038464 4583 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.038912 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused
Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.038931 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.038963 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError"
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.038979 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.039398 4583 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040261 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040294 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040302 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040309 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040328 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040334 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040342 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040355 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040363 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040371 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040387 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.040395 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.041357 4583 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.041683 4583 server.go:1280] "Started kubelet"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.042229 4583 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.042236 4583 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.042244 4583 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 09 14:58:22 crc systemd[1]: Started Kubernetes Kubelet.
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.042597 4583 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.043632 4583 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.043673 4583 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.043722 4583 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 13:43:15.229767811 +0000 UTC
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.045143 4583 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.045163 4583 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.045245 4583 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.046080 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.046149 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.046588 4583 factory.go:55] Registering systemd factory
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.046678 4583 factory.go:221] Registration of the systemd container factory successfully
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.046653 4583 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.046950 4583 factory.go:153] Registering CRI-O factory
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.047007 4583 factory.go:221] Registration of the crio container factory successfully
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.047099 4583 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.047183 4583 factory.go:103] Registering Raw factory
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.047250 4583 manager.go:1196] Started watching for new ooms in manager
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.047348 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="200ms"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.047433 4583 server.go:460] "Adding debug handlers to kubelet server"
Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.046874 4583 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.88:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187f93feebecb199 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-09 14:58:22.041657753 +0000 UTC m=+0.249106841,LastTimestamp:2025-12-09 14:58:22.041657753 +0000 UTC m=+0.249106841,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.048072 4583 manager.go:319] Starting recovery of all containers
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.056646 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057205 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057228 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057239 4583 reconstruct.go:130] "Volume is marked as uncertain and
added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057247 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057274 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057286 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057299 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057310 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057321 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057476 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057513 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057525 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057536 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057546 4583 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057555 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057564 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057630 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057640 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057648 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057674 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057684 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057694 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057702 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057710 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057719 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057730 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057767 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057777 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057785 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057792 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057805 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057814 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057839 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057848 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057856 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057863 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057886 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057896 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057920 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057930 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057938 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057946 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057953 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057961 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057970 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.057978 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.058014 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.058023 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.058031 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.058040 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059857 4583 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059900 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059926 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059939 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059952 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059962 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059971 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059980 4583 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059988 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.059997 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060006 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060016 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060181 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060195 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060204 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060212 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060220 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060229 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060239 4583 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060248 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060257 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060267 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060275 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060284 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060293 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060301 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060310 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060318 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060327 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060335 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060344 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060352 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060983 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.060996 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061006 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061021 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061038 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061072 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061088 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061098 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061108 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061117 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061125 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061134 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061143 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061152 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061161 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061168 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061176 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061184 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061192 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061445 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061483 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061494 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061508 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061519 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061529 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061537 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061546 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061562 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061573 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061584 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061593 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061601 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061609 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061617 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061639 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061647 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061655 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061663 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061670 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061678 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061686 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061694 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061702 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061709 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061717 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061727 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061736 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061761 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061768 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061776 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061784 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061794 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061803 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061811 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061819 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061827 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061835 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061843 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061849 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061857 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061865 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061873 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061881 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061888 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061895 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061903 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061910 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061918 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061926 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061934 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061941 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061949 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061957 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061965 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061973 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061986 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.061993 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.062001 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.062009 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.062017 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063288 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063318 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063330 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063351 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063361 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063370 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063379 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063388 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063396 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063405 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063414 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063423 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063432 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063441 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063450 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063458 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063467 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063482 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063563 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063578 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063586 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063595 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063607 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063618 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063639 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063648 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063656 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063666 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063674 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063683 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063691 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063701 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063710 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063719 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063727 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063735 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063756 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063766 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063785 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063799 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063807 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063816 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063823 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063832 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063840 4583 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063848 4583 reconstruct.go:97] "Volume reconstruction finished" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.063854 4583 reconciler.go:26] "Reconciler: start to sync state" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.072788 4583 manager.go:324] Recovery completed Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.075668 4583 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.077292 4583 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.077378 4583 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.077444 4583 kubelet.go:2335] "Starting kubelet main sync loop" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.077576 4583 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.077934 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.077968 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.084841 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.085812 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.086481 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.086549 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.087265 4583 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.087280 4583 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.087295 4583 state_mem.go:36] "Initialized new in-memory state store" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.091981 4583 policy_none.go:49] "None policy: Start" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.092571 4583 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.092612 4583 state_mem.go:35] "Initializing new in-memory state store" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.143569 4583 manager.go:334] "Starting Device Plugin manager" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.143678 4583 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.143781 4583 server.go:79] "Starting device plugin registration server" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.144219 4583 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.144303 4583 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.144470 4583 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 
09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.144553 4583 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.144572 4583 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.151337 4583 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.178836 4583 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.178959 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180086 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180212 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180468 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.180510 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181103 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181116 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181197 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181251 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181316 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181645 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.181676 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182211 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182238 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182249 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182330 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182444 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182489 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182500 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182523 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182559 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182925 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182958 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.182969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183112 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183193 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183216 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183233 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183249 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183261 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183732 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183774 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.183994 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.184023 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.184553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.184575 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.184584 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.245428 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.246674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.246708 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.246718 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.246738 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.247183 4583 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.88:6443: connect: connection refused" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.248612 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="400ms" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266784 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266820 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266839 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266859 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266877 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.266933 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267015 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267045 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267065 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267098 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267130 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267149 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267172 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" 
(UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267188 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.267205 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.367889 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.367939 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.367957 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.367972 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.367988 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368003 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368021 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368037 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368067 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368083 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368097 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368111 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368128 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368127 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368143 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368161 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368185 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 
14:58:22.368142 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368183 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368213 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368245 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368246 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368261 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368257 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368267 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368266 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368276 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368274 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368301 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.368295 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.448181 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.449262 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.449369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.449435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.449512 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.449994 4583 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.88:6443: connect: connection refused" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.502200 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.510004 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.523804 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.525253 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-0259089a1f9f989c7e8f8acd5ba522fd079908abdc190539def813a6f008be19 WatchSource:0}: Error finding container 0259089a1f9f989c7e8f8acd5ba522fd079908abdc190539def813a6f008be19: Status 404 returned error can't find the container with id 0259089a1f9f989c7e8f8acd5ba522fd079908abdc190539def813a6f008be19 Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.529176 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a429bb20c25a9cae18116b2bb69adb734047647debdf42d34fe101ff0515d230 WatchSource:0}: Error finding container a429bb20c25a9cae18116b2bb69adb734047647debdf42d34fe101ff0515d230: Status 404 returned error can't find the container with id a429bb20c25a9cae18116b2bb69adb734047647debdf42d34fe101ff0515d230 Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.536231 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-b0b1b5f9c4e0e622a41efa35fa8b7a6eabf1e0502704abe79b19d8d1c8bff3b4 WatchSource:0}: Error finding container b0b1b5f9c4e0e622a41efa35fa8b7a6eabf1e0502704abe79b19d8d1c8bff3b4: Status 404 returned error can't find the container with id b0b1b5f9c4e0e622a41efa35fa8b7a6eabf1e0502704abe79b19d8d1c8bff3b4 Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.539894 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.546694 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.550355 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a0ef78c789b67db4cd51528d1c6fb88cdfb3076fe44a256dcfc2a7f9a5424fe2 WatchSource:0}: Error finding container a0ef78c789b67db4cd51528d1c6fb88cdfb3076fe44a256dcfc2a7f9a5424fe2: Status 404 returned error can't find the container with id a0ef78c789b67db4cd51528d1c6fb88cdfb3076fe44a256dcfc2a7f9a5424fe2 Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.555224 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-41d0cf5867d78795acaf0ab07809a3c5d6a8feb3c34566a741e535ead60eea79 WatchSource:0}: Error finding container 41d0cf5867d78795acaf0ab07809a3c5d6a8feb3c34566a741e535ead60eea79: Status 404 returned error can't find the container with id 41d0cf5867d78795acaf0ab07809a3c5d6a8feb3c34566a741e535ead60eea79 Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.649091 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="800ms" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.850199 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.854366 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.855144 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.855208 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:22 crc kubenswrapper[4583]: I1209 14:58:22.855286 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.855999 4583 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.88:6443: connect: connection refused" node="crc" Dec 09 14:58:22 crc kubenswrapper[4583]: W1209 14:58:22.963547 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 14:58:22 crc kubenswrapper[4583]: E1209 14:58:22.963628 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.043534 4583 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 
192.168.26.88:6443: connect: connection refused Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.043808 4583 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 05:37:49.838907479 +0000 UTC Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.043872 4583 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 302h39m26.795037876s for next certificate rotation Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.081605 4583 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81" exitCode=0 Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.081679 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.081780 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"41d0cf5867d78795acaf0ab07809a3c5d6a8feb3c34566a741e535ead60eea79"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.081883 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082556 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082582 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082591 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082920 4583 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="b8c89cb93ddbfb6d04163384314515097a195eaf6e9be70c154528822e6d3006" exitCode=0 Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082968 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"b8c89cb93ddbfb6d04163384314515097a195eaf6e9be70c154528822e6d3006"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.082983 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a0ef78c789b67db4cd51528d1c6fb88cdfb3076fe44a256dcfc2a7f9a5424fe2"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.083024 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.083534 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.083560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.083568 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.084256 4583 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c" exitCode=0 Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.084299 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.084314 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b0b1b5f9c4e0e622a41efa35fa8b7a6eabf1e0502704abe79b19d8d1c8bff3b4"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.084364 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.085025 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.085047 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.085055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.086011 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.086042 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0259089a1f9f989c7e8f8acd5ba522fd079908abdc190539def813a6f008be19"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.088533 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb" exitCode=0 Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.088560 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.088576 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a429bb20c25a9cae18116b2bb69adb734047647debdf42d34fe101ff0515d230"} Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.088651 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.089280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: 
I1209 14:58:23.089306 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.089314 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.091783 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.092360 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.092385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.092394 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: W1209 14:58:23.223001 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 14:58:23 crc kubenswrapper[4583]: E1209 14:58:23.223077 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError" Dec 09 14:58:23 crc kubenswrapper[4583]: W1209 14:58:23.241861 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 14:58:23 crc kubenswrapper[4583]: E1209 14:58:23.241907 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError" Dec 09 14:58:23 crc kubenswrapper[4583]: E1209 14:58:23.450662 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="1.6s" Dec 09 14:58:23 crc kubenswrapper[4583]: W1209 14:58:23.513062 4583 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 14:58:23 crc kubenswrapper[4583]: E1209 14:58:23.513183 4583 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.88:6443: connect: connection refused" logger="UnhandledError" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.656754 4583 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.657662 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.657701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.657710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:23 crc kubenswrapper[4583]: I1209 14:58:23.657773 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:23 crc kubenswrapper[4583]: E1209 14:58:23.658148 4583 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.88:6443: connect: connection refused" node="crc" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.094825 4583 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15" exitCode=0 Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.094856 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.095003 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.095796 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.095824 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.095853 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.095982 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a6e09a35dde29506f55d43fd46acf63f1baf966cee84c909f52af402d823c535"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.096068 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.096690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.096714 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.096723 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098219 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098242 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098254 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098306 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098886 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.098918 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100271 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100293 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100303 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100350 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100846 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.100876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102893 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102915 4583 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102926 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102936 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102943 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96"} Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.102996 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.103540 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.103567 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:24 crc kubenswrapper[4583]: I1209 14:58:24.103577 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.109336 4583 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a" exitCode=0 Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.109442 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.109428 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a"} Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.109665 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110346 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110354 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110425 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110475 4583 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.110489 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.258343 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.259127 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.259159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.259169 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.259193 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.475322 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.475482 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.476256 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.476286 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:25 crc kubenswrapper[4583]: I1209 14:58:25.476294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.111577 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113468 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113797 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152"} Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113823 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1"} Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113833 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7"} Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113841 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b"} Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113849 4583 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641"} Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.113935 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114254 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114293 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114461 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114478 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.114485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.249039 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.429424 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814012 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814123 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814321 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814801 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:26 crc kubenswrapper[4583]: I1209 14:58:26.814852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.114986 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115005 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.114991 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115676 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 
14:58:27.115699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115707 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115800 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115818 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:27 crc kubenswrapper[4583]: I1209 14:58:27.115805 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:28 crc kubenswrapper[4583]: I1209 14:58:28.010898 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:28 crc kubenswrapper[4583]: I1209 14:58:28.117224 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:28 crc kubenswrapper[4583]: I1209 14:58:28.117876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:28 crc kubenswrapper[4583]: I1209 14:58:28.117914 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:28 crc kubenswrapper[4583]: I1209 14:58:28.117923 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:29 crc kubenswrapper[4583]: I1209 14:58:29.111980 4583 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 09 14:58:29 crc kubenswrapper[4583]: I1209 14:58:29.112071 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 09 14:58:32 crc kubenswrapper[4583]: E1209 14:58:32.151404 4583 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.406254 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.406450 4583 kubelet_node_status.go:401] "Setting node annotation 
to enable volume controller attach/detach" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.407279 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.407319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.407329 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.578506 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.578643 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.579441 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.579477 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.579487 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:32 crc kubenswrapper[4583]: I1209 14:58:32.582850 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.126421 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.127759 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.127795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.127809 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.130900 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.217771 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.217824 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 09 14:58:33 crc kubenswrapper[4583]: I1209 14:58:33.630920 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 
14:58:34.043779 4583 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.069238 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.069280 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.074940 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.074987 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.128869 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.129562 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.129590 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:34 crc kubenswrapper[4583]: I1209 14:58:34.129598 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:35 crc kubenswrapper[4583]: I1209 14:58:35.130832 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:35 crc kubenswrapper[4583]: I1209 14:58:35.131431 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:35 crc kubenswrapper[4583]: I1209 14:58:35.131460 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:35 crc kubenswrapper[4583]: I1209 14:58:35.131470 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:36 crc kubenswrapper[4583]: I1209 14:58:36.227106 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 09 14:58:36 crc kubenswrapper[4583]: I1209 
14:58:36.227152 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.015656 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.015793 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.016136 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.016243 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.016412 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.016446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.016456 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.018595 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136015 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136293 4583 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136330 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136618 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136644 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:38 crc kubenswrapper[4583]: I1209 14:58:38.136653 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:39 crc kubenswrapper[4583]: E1209 14:58:39.070596 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.072522 4583 trace.go:236] Trace[107304415]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Dec-2025 14:58:25.890) (total time: 13181ms): Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[107304415]: ---"Objects listed" error: 13181ms (14:58:39.072) Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[107304415]: [13.181961495s] [13.181961495s] END Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.072563 4583 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.073944 4583 trace.go:236] Trace[1517221246]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Dec-2025 14:58:25.551) (total time: 13522ms): Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[1517221246]: ---"Objects listed" error: 13522ms (14:58:39.073) Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[1517221246]: [13.522139407s] [13.522139407s] END Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.073962 4583 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 09 14:58:39 crc kubenswrapper[4583]: E1209 14:58:39.075039 4583 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.075142 4583 trace.go:236] Trace[95039641]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Dec-2025 14:58:25.256) (total time: 13818ms): Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[95039641]: ---"Objects listed" error: 13818ms (14:58:39.075) Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[95039641]: [13.818450529s] [13.818450529s] END Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.075157 4583 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.075158 4583 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.075314 4583 trace.go:236] Trace[988683230]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Dec-2025 14:58:25.955) (total time: 13119ms): Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[988683230]: ---"Objects listed" error: 13119ms (14:58:39.075) Dec 09 14:58:39 crc kubenswrapper[4583]: Trace[988683230]: [13.119829476s] [13.119829476s] END Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.075332 4583 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.108240 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:39 crc kubenswrapper[4583]: I1209 14:58:39.110811 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:39 crc kubenswrapper[4583]: 
E1209 14:58:39.141634 4583 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.046345 4583 apiserver.go:52] "Watching apiserver" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.047775 4583 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.047988 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048282 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048500 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048500 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048500 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048543 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.048699 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.048777 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.049093 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.049190 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.049122 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.049264 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.050046 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.050279 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.050424 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.050995 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.051153 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.051782 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.052027 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.065061 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.071972 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.078324 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.087450 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.094555 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.100093 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.105634 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.111094 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.139268 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.140309 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393" exitCode=255 Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.140381 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393"} Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.145298 4583 scope.go:117] "RemoveContainer" containerID="3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.145888 4583 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.146174 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.147136 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.152353 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.160804 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.167121 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.173500 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.181071 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182265 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182364 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182437 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182498 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182567 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182628 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182690 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182774 
4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182852 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182917 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182982 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183043 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182519 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182568 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182659 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182706 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.182812 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183093 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183242 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183242 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183117 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183462 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183526 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183587 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183651 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183706 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183782 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183843 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183913 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183410 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod 
"8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183551 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183652 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183863 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.183979 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184042 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184062 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184079 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184111 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184129 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: 
\"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184079 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184145 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184161 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184176 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184193 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184209 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184224 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184238 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184251 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184267 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184281 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184295 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184308 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184324 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184339 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184355 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184368 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184386 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184416 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184445 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184461 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184477 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184492 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184504 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184519 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184549 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184562 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184575 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184589 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184604 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184617 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184631 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184645 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184659 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184673 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184688 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184702 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184715 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184729 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184762 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184776 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184790 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184804 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184820 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184834 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184852 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184864 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184878 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184893 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184907 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184920 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184935 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184949 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184965 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184978 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184994 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185008 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185022 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185036 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185054 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185067 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185108 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184168 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185122 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184193 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185137 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184262 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184575 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184642 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184691 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184841 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184911 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184965 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.184992 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185108 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185239 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185374 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185404 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185465 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185671 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185692 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185943 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186218 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186280 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186366 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186432 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186453 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186458 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186472 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186537 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186217 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186581 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186707 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186734 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186893 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186854 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.186247 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187211 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187274 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187316 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.185155 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187857 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187876 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187892 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187906 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187973 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.187988 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188002 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188019 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188033 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188048 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188063 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188077 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188103 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188117 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188130 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188143 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188156 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188169 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188185 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188292 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188307 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188320 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188336 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188350 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188365 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188381 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188396 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188463 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188482 4583 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188497 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188512 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188525 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188540 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188555 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188569 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188584 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188599 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188667 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188684 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188698 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188712 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188727 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188761 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188776 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188792 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188806 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188820 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188836 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188850 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188865 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188879 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188893 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188907 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188922 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188939 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188954 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188970 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188984 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188999 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189015 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189030 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189044 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189063 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189079 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189107 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189122 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189137 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189152 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") 
pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189167 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189182 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189197 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189213 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189228 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189244 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189259 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189276 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189288 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189298 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189312 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189326 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189347 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189366 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189381 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189396 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189410 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189424 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189438 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") 
pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189476 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189491 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189488 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189491 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189506 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189533 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189547 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189561 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189581 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189594 
4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189611 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189626 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189640 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189656 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189672 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189686 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189701 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189716 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189736 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 
14:58:40.189771 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189775 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189820 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189841 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189858 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189884 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189909 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189929 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189946 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod 
\"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189963 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189981 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.189999 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190058 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190113 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190122 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190131 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190162 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190224 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190236 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190249 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190262 4583 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190272 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190312 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190323 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190319 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190333 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190342 4583 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190350 4583 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190858 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191333 4583 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191368 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191381 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191391 4583 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191516 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191528 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191538 4583 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191547 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191555 4583 reconciler_common.go:293] 
"Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191565 4583 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191610 4583 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191625 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191638 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191646 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191655 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191663 4583 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191671 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191680 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191690 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191699 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191770 4583 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc 
kubenswrapper[4583]: I1209 14:58:40.191783 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191793 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191802 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191810 4583 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191819 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191829 4583 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191837 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191845 4583 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191855 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191864 4583 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191872 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191881 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191889 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" 
DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191896 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191905 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191913 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191923 4583 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191931 4583 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191941 4583 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191949 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191957 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191966 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191977 4583 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190530 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190579 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191990 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192037 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190681 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190795 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190854 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.190933 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191128 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191176 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191225 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.191230 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192154 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.192178 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:40.692164824 +0000 UTC m=+18.899613912 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192209 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192240 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191453 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192438 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192507 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192541 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192706 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192709 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192719 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191708 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191725 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191829 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191834 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191847 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192019 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192096 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191531 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.191399 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.192997 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.193436 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.193659 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.193705 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.193856 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:58:40.693821587 +0000 UTC m=+18.901270675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194408 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194783 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194786 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.188523 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194836 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194919 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.194966 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195266 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195301 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195375 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195492 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195570 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195592 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195655 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195828 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.195899 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196018 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196167 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196260 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196555 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196800 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.197067 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.197066 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.197234 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.197277 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.196863 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.197947 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.198212 4583 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.198689 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.198857 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.199490 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.199692 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.199959 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.200173 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.200250 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.201817 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.201925 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:40.701913755 +0000 UTC m=+18.909362843 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.202224 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.202396 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.202966 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206219 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206429 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206468 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206591 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206645 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206809 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.206866 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.207247 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207559 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207577 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207588 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207591 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207600 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207608 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207640 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:40.707632566 +0000 UTC m=+18.915081654 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.207639 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.207770 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.207899 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:40.707890012 +0000 UTC m=+18.915339101 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.207943 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.207994 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208003 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208003 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208151 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208155 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208239 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208300 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208386 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208459 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208578 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208654 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208764 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.208991 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.209211 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.209899 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.210523 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.210690 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.210709 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.210755 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211121 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211227 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211631 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211813 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211896 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211933 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.211968 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212056 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212124 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212166 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212191 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212418 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.212647 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216241 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216138 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216310 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216341 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216652 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216793 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.216836 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.217662 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.217820 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.217837 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.217889 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.217962 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218109 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218180 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218199 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218374 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218385 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218392 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218445 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218573 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218757 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218781 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218867 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218876 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.218935 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.219022 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.219271 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.219459 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.219712 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.229314 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.234728 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.236806 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.244525 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.292898 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.292931 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.292992 4583 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293003 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293013 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293020 4583 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293028 4583 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293036 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293043 4583 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293051 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293058 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293066 4583 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath 
\"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293075 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293095 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293103 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293112 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293120 4583 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293061 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293127 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293121 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293163 4583 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293173 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293182 4583 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293209 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293219 4583 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293228 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293235 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293244 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293252 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293260 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293282 4583 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293291 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293298 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293306 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293313 4583 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293320 4583 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293328 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293335 4583 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293343 4583 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293365 4583 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293372 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293380 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293387 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293395 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293402 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293410 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293417 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293439 4583 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293446 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293454 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293461 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293469 4583 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293478 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293487 4583 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293494 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293516 4583 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293524 4583 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293531 4583 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293544 4583 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293552 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293560 4583 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293568 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293595 4583 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293603 4583 reconciler_common.go:293] 
"Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293611 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293618 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293625 4583 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293632 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293640 4583 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293646 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293653 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293675 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293682 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293689 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293698 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293705 4583 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293712 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" 
(UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293720 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293760 4583 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293778 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293785 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293793 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293800 4583 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293810 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293818 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293840 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293847 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293855 4583 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293862 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293870 4583 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293877 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293884 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293892 4583 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293915 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293922 4583 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293930 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293937 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293945 4583 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293953 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293960 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293967 4583 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293974 4583 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.293997 4583 reconciler_common.go:293] "Volume detached for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294004 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294011 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294018 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294026 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294032 4583 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294040 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294047 4583 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294068 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294076 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294095 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294104 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294112 4583 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294120 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: 
\"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294127 4583 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294149 4583 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294157 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294164 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294171 4583 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294180 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294190 4583 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294197 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294204 4583 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294232 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294240 4583 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294248 4583 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294255 4583 reconciler_common.go:293] "Volume 
detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294262 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294269 4583 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294276 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294283 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294311 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294318 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294325 4583 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294333 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294341 4583 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294348 4583 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294355 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294362 4583 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294387 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294395 4583 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.294402 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.358033 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.362783 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 09 14:58:40 crc kubenswrapper[4583]: W1209 14:58:40.365736 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-450bd3c72d684a82d199827258c2f2af3029dcc471832b24f716a1e18e844690 WatchSource:0}: Error finding container 450bd3c72d684a82d199827258c2f2af3029dcc471832b24f716a1e18e844690: Status 404 returned error can't find the container with id 450bd3c72d684a82d199827258c2f2af3029dcc471832b24f716a1e18e844690 Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.367781 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 09 14:58:40 crc kubenswrapper[4583]: W1209 14:58:40.374644 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-cdb7995709396a6d1df05c5ed319d973ccdfa0bca155ca28a4905a87ab3da264 WatchSource:0}: Error finding container cdb7995709396a6d1df05c5ed319d973ccdfa0bca155ca28a4905a87ab3da264: Status 404 returned error can't find the container with id cdb7995709396a6d1df05c5ed319d973ccdfa0bca155ca28a4905a87ab3da264 Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.697135 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.697228 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.697310 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.697316 4583 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:58:41.697290364 +0000 UTC m=+19.904739462 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.697366 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:41.697355346 +0000 UTC m=+19.904804435 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.797730 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.797807 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:40 crc kubenswrapper[4583]: I1209 14:58:40.797825 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797867 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797890 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797909 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797920 4583 projected.go:194] Error preparing data 
for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797955 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:41.797944195 +0000 UTC m=+20.005393283 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.797891 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.798003 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.798067 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:41.798050597 +0000 UTC m=+20.005499684 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.798104 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:40 crc kubenswrapper[4583]: E1209 14:58:40.798127 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:41.798121079 +0000 UTC m=+20.005570168 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.144058 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.145353 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.145761 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.146055 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c7d166cf644946fbc8140747d8fc059a03a57cc084a251b2dc3e777169f76b06"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.147049 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.147074 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.147098 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"cdb7995709396a6d1df05c5ed319d973ccdfa0bca155ca28a4905a87ab3da264"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.148295 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.148321 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"450bd3c72d684a82d199827258c2f2af3029dcc471832b24f716a1e18e844690"} Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.166220 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.173971 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.183397 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.191371 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.198733 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.205863 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.212650 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.219928 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.227165 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.234867 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.243570 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.252664 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.260621 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.268265 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.275980 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.284630 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:41Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.703297 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.703353 4583 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.703435 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.703486 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:58:43.703464103 +0000 UTC m=+21.910913191 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.703511 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:43.70350456 +0000 UTC m=+21.910953648 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.804492 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.804523 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:41 crc kubenswrapper[4583]: I1209 14:58:41.804544 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804635 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804680 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804713 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804721 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:43.804710665 +0000 UTC m=+22.012159754 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804724 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804638 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804781 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804790 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:43.804777592 +0000 UTC m=+22.012226680 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804792 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:41 crc kubenswrapper[4583]: E1209 14:58:41.804824 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:43.804818579 +0000 UTC m=+22.012267667 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.077950 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.078030 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.078155 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.078304 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.078374 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.078498 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.081352 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.082031 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.083007 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.083540 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.084404 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.084864 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.085704 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.086201 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.087114 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.087593 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.088033 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.088214 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.088951 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.089422 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.090232 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.090678 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.091452 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.091951 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.092300 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.093201 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.093674 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.094103 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.095039 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.095426 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.096100 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.096508 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.097054 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.098053 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.098601 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.099365 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.099868 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.100691 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.101211 4583 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.101307 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.103155 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.103272 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.104004 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.104472 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.105830 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.109616 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.110225 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.111229 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.111851 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.112627 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.113196 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.114093 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.114628 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.115408 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.115902 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.116674 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.117340 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.118136 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.118559 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.118987 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.121217 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.121723 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.123642 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.126091 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.140415 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.151457 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.151580 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.163358 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.173412 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.181387 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.189863 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.198610 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.208214 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.216880 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.224248 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.232251 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.242402 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.275118 4583 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.276343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.276371 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc 
kubenswrapper[4583]: I1209 14:58:42.276380 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.276454 4583 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.282146 4583 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.282308 4583 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.283163 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.283195 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.283204 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.283236 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.283246 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.297183 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.299948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.299988 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.299998 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.300017 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.300027 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.311781 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.314252 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.314280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.314289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.314303 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.314311 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.324265 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.327696 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.327731 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.327739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.327770 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.327778 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.337000 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.339314 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.339343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.339352 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.339363 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.339370 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.350962 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.351109 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.352284 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.352307 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.352315 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.352326 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.352334 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.445871 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.453523 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.453552 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.453560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.453571 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.453579 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.466625 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.468217 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.485700 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.486113 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.509494 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.522185 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.530530 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.540426 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.548559 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.555234 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.555280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.555289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.555314 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.555323 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.558292 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.577504 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.588540 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.597157 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.610212 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.619232 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.627290 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.637291 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.647906 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.656850 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.656885 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.656894 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.656908 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.656918 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.659670 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.759173 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.759210 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.759219 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.759231 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.759240 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.777857 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-ck9w4"] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.778186 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.779814 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.779860 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-j2sh5"] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.780126 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.780198 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-jmtvl"] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.780426 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2vxtt"] Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.780517 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.781069 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.781372 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.781381 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 09 14:58:42 crc kubenswrapper[4583]: W1209 14:58:42.783003 4583 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.783038 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.783052 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 09 14:58:42 crc kubenswrapper[4583]: W1209 14:58:42.783263 4583 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 09 14:58:42 crc kubenswrapper[4583]: E1209 14:58:42.783298 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource 
\"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.783269 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.784140 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.784207 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786062 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786123 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786140 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786303 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786385 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.786829 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.792738 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.804894 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.813143 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.823109 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.830933 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.839626 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.860646 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.860675 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.860684 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.860695 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.860703 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.862943 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.879974 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.896650 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is 
after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.909717 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912208 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfhk2\" (UniqueName: \"kubernetes.io/projected/c240c9e3-573f-4237-b433-deb0f21e0123-kube-api-access-pfhk2\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912237 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912253 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-os-release\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912268 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4mk6\" (UniqueName: \"kubernetes.io/projected/41313e81-1231-4cd0-a9b6-8a9d336704de-kube-api-access-f4mk6\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912282 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8knc\" (UniqueName: \"kubernetes.io/projected/e80dcf94-2f3f-4645-9860-2865a33ee9ad-kube-api-access-k8knc\") pod 
\"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912295 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-netns\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912308 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-multus-daemon-config\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912322 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-etc-kubernetes\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912383 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e80dcf94-2f3f-4645-9860-2865a33ee9ad-hosts-file\") pod \"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912421 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ce1d933-c482-444a-86c2-3e9f1315eb16-proxy-tls\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912442 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-socket-dir-parent\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912460 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912490 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ce1d933-c482-444a-86c2-3e9f1315eb16-mcd-auth-proxy-config\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912506 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-k8s-cni-cncf-io\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912522 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-multus\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912538 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-hostroot\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912554 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-os-release\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912597 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-system-cni-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912650 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-system-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912674 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-cnibin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912693 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912710 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ce1d933-c482-444a-86c2-3e9f1315eb16-rootfs\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912722 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-bin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912769 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-cni-binary-copy\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912796 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-conf-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912840 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-cnibin\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912855 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912888 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-kubelet\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912923 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-multus-certs\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.912966 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sj2c\" (UniqueName: \"kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.925895 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.938637 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.948386 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.956627 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.962562 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.962592 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.962601 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.962613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.962621 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:42Z","lastTransitionTime":"2025-12-09T14:58:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.968846 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.978397 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:42 crc kubenswrapper[4583]: I1209 14:58:42.986273 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is 
after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.000007 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\"
:\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.009874 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013588 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-kubelet\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013685 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-multus-certs\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013777 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2sj2c\" (UniqueName: \"kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013863 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfhk2\" (UniqueName: \"kubernetes.io/projected/c240c9e3-573f-4237-b433-deb0f21e0123-kube-api-access-pfhk2\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013936 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014001 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-os-release\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014058 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-os-release\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013717 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-multus-certs\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014059 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4mk6\" (UniqueName: \"kubernetes.io/projected/41313e81-1231-4cd0-a9b6-8a9d336704de-kube-api-access-f4mk6\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014128 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e80dcf94-2f3f-4645-9860-2865a33ee9ad-hosts-file\") pod \"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014144 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8knc\" (UniqueName: \"kubernetes.io/projected/e80dcf94-2f3f-4645-9860-2865a33ee9ad-kube-api-access-k8knc\") pod \"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014056 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.013693 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-kubelet\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014162 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-netns\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014176 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-netns\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014214 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e80dcf94-2f3f-4645-9860-2865a33ee9ad-hosts-file\") pod \"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014217 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-multus-daemon-config\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014268 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-etc-kubernetes\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014292 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ce1d933-c482-444a-86c2-3e9f1315eb16-proxy-tls\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014320 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ce1d933-c482-444a-86c2-3e9f1315eb16-mcd-auth-proxy-config\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014335 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-socket-dir-parent\") pod \"multus-j2sh5\" (UID: 
\"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014349 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014370 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-k8s-cni-cncf-io\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014383 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-multus\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014396 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-hostroot\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014412 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-system-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014444 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-cnibin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014444 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-etc-kubernetes\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014456 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-os-release\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014492 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-os-release\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014508 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-system-cni-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014531 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014548 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ce1d933-c482-444a-86c2-3e9f1315eb16-rootfs\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014563 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-bin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014595 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-cni-binary-copy\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014609 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-conf-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014624 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-cnibin\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014638 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014802 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-multus-daemon-config\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014848 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-bin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014874 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-system-cni-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014938 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-hostroot\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014958 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ce1d933-c482-444a-86c2-3e9f1315eb16-mcd-auth-proxy-config\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.014990 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-run-k8s-cni-cncf-io\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015011 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-system-cni-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015017 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-host-var-lib-cni-multus\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015030 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ce1d933-c482-444a-86c2-3e9f1315eb16-rootfs\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015040 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-conf-dir\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015062 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-cnibin\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 
14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015073 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015098 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-cnibin\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015237 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/41313e81-1231-4cd0-a9b6-8a9d336704de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015286 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/41313e81-1231-4cd0-a9b6-8a9d336704de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015303 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c240c9e3-573f-4237-b433-deb0f21e0123-multus-socket-dir-parent\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.015423 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c240c9e3-573f-4237-b433-deb0f21e0123-cni-binary-copy\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.018385 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ce1d933-c482-444a-86c2-3e9f1315eb16-proxy-tls\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.028788 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8knc\" (UniqueName: \"kubernetes.io/projected/e80dcf94-2f3f-4645-9860-2865a33ee9ad-kube-api-access-k8knc\") pod \"node-resolver-ck9w4\" (UID: \"e80dcf94-2f3f-4645-9860-2865a33ee9ad\") " pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.029281 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfhk2\" (UniqueName: \"kubernetes.io/projected/c240c9e3-573f-4237-b433-deb0f21e0123-kube-api-access-pfhk2\") pod \"multus-j2sh5\" (UID: \"c240c9e3-573f-4237-b433-deb0f21e0123\") " pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.029852 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4mk6\" (UniqueName: \"kubernetes.io/projected/41313e81-1231-4cd0-a9b6-8a9d336704de-kube-api-access-f4mk6\") pod \"multus-additional-cni-plugins-2vxtt\" (UID: \"41313e81-1231-4cd0-a9b6-8a9d336704de\") " pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.036594 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b
4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.044929 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.052510 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.061002 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.064651 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.064683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.064693 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.064707 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.064716 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.088309 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-ck9w4" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.095266 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-j2sh5" Dec 09 14:58:43 crc kubenswrapper[4583]: W1209 14:58:43.102740 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc240c9e3_573f_4237_b433_deb0f21e0123.slice/crio-66d26abe13fc38bd7d72b5c2942b9834d308aed485c8c6a24442e3e452205865 WatchSource:0}: Error finding container 66d26abe13fc38bd7d72b5c2942b9834d308aed485c8c6a24442e3e452205865: Status 404 returned error can't find the container with id 66d26abe13fc38bd7d72b5c2942b9834d308aed485c8c6a24442e3e452205865 Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.155999 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5rdc6"] Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.156640 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.158065 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.159103 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.159465 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.160276 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerStarted","Data":"66d26abe13fc38bd7d72b5c2942b9834d308aed485c8c6a24442e3e452205865"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.168169 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ck9w4" event={"ID":"e80dcf94-2f3f-4645-9860-2865a33ee9ad","Type":"ContainerStarted","Data":"b582e53b112f86161eaa0b65607b637fee56d14f36f336ab8bc5f54fd9dfad45"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.172065 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.172106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.172116 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.172129 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.172138 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.175543 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.175573 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.175803 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.175830 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.185981 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.198833 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.209286 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216494 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is 
after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216530 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216553 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216586 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216607 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216621 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54w5r\" (UniqueName: \"kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216635 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216669 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216683 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216759 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket\") pod 
\"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216785 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216801 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216835 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216852 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216899 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216913 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216928 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216942 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216955 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.216985 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.217000 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.228543 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.238339 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.252556 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.265256 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.273599 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.274281 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.274311 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.274320 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.274332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.274341 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.305229 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: 
I1209 14:58:43.318110 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318142 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318170 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318188 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318202 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318216 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54w5r\" (UniqueName: \"kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318231 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318246 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318274 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318287 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318301 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318321 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318339 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318353 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318376 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318403 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318416 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318429 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318442 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318454 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318963 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318970 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.318944 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319017 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319035 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319108 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319113 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319143 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet\") pod 
\"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319144 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319157 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319188 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319208 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319229 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319251 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319305 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319320 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.319509 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 
14:58:43.319599 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.321201 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.350761 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54w5r\" (UniqueName: \"kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r\") pod \"ovnkube-node-5rdc6\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.367718 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.376268 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.376296 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.376307 4583 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.376319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.376336 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.405293 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.445412 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.472487 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.477488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.477587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.477658 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.477717 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.477792 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: W1209 14:58:43.481767 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2acdc1e0_3af8_41bf_958f_3cd291da42db.slice/crio-5da86cc75f95cab353e389b07674cc2d85c9f9aceba024d15907ce3d5ee7025f WatchSource:0}: Error finding container 5da86cc75f95cab353e389b07674cc2d85c9f9aceba024d15907ce3d5ee7025f: Status 404 returned error can't find the container with id 5da86cc75f95cab353e389b07674cc2d85c9f9aceba024d15907ce3d5ee7025f Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.485574 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:43Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.579667 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.579699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.579707 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.579719 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.579728 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.681067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.681108 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.681119 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.681131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.681139 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.722691 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.722783 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.722858 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.722888 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:58:47.722863934 +0000 UTC m=+25.930313022 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.722915 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:47.722908869 +0000 UTC m=+25.930357957 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.782420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.782446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.782454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.782465 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.782474 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.823336 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.823368 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.823414 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823483 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823501 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823511 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:43 crc 
kubenswrapper[4583]: E1209 14:58:43.823523 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823528 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823535 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823540 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823542 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:47.823529508 +0000 UTC m=+26.030978597 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823612 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:47.823601584 +0000 UTC m=+26.031050663 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:43 crc kubenswrapper[4583]: E1209 14:58:43.823644 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:47.823618888 +0000 UTC m=+26.031067976 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.884902 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.884938 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.884954 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.884970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.884979 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.986806 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.986843 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.986852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.986865 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.986873 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:43Z","lastTransitionTime":"2025-12-09T14:58:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:43 crc kubenswrapper[4583]: I1209 14:58:43.999390 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.002162 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" Dec 09 14:58:44 crc kubenswrapper[4583]: W1209 14:58:44.010790 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41313e81_1231_4cd0_a9b6_8a9d336704de.slice/crio-a5bb98592cef7a3db2709e755551bdf14dfbca4495cc13461d02bfe0ac49b4b5 WatchSource:0}: Error finding container a5bb98592cef7a3db2709e755551bdf14dfbca4495cc13461d02bfe0ac49b4b5: Status 404 returned error can't find the container with id a5bb98592cef7a3db2709e755551bdf14dfbca4495cc13461d02bfe0ac49b4b5 Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.026702 4583 projected.go:288] Couldn't get configMap openshift-machine-config-operator/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.026725 4583 projected.go:194] Error preparing data for projected volume kube-api-access-2sj2c for pod openshift-machine-config-operator/machine-config-daemon-jmtvl: failed to sync configmap cache: timed out waiting for the condition Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.026788 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c podName:7ce1d933-c482-444a-86c2-3e9f1315eb16 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:44.526773082 +0000 UTC m=+22.734222170 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-2sj2c" (UniqueName: "kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c") pod "machine-config-daemon-jmtvl" (UID: "7ce1d933-c482-444a-86c2-3e9f1315eb16") : failed to sync configmap cache: timed out waiting for the condition Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.078699 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.078710 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.078981 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.078910 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.078723 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:44 crc kubenswrapper[4583]: E1209 14:58:44.079043 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.088948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.088976 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.088984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.088994 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.089002 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.171314 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerStarted","Data":"a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.171353 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerStarted","Data":"a5bb98592cef7a3db2709e755551bdf14dfbca4495cc13461d02bfe0ac49b4b5"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.173554 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerStarted","Data":"1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.174641 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ck9w4" event={"ID":"e80dcf94-2f3f-4645-9860-2865a33ee9ad","Type":"ContainerStarted","Data":"963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.175639 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" exitCode=0 Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.175664 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" 
event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.175679 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"5da86cc75f95cab353e389b07674cc2d85c9f9aceba024d15907ce3d5ee7025f"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.180812 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.190369 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.192664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.192690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.192697 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.192709 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.192718 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.197954 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.206880 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.216827 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.229846 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.238764 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.239816 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.251148 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.259013 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.267699 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.275458 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.283086 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.290604 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.295153 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.295181 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.295190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.295230 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.295245 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.298762 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: 
I1209 14:58:44.307178 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.320462 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.329706 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.338395 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.353703 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.361736 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.369895 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.396843 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.396869 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.396878 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.396890 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.396898 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.409771 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142
eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.445937 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.485200 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.497986 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.498105 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.498187 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.498271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.498349 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.525439 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.528742 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sj2c\" (UniqueName: \"kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.531036 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sj2c\" (UniqueName: \"kubernetes.io/projected/7ce1d933-c482-444a-86c2-3e9f1315eb16-kube-api-access-2sj2c\") pod \"machine-config-daemon-jmtvl\" (UID: \"7ce1d933-c482-444a-86c2-3e9f1315eb16\") " pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.566872 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.600298 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.600324 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.600349 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.600364 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.600371 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.604538 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.606379 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 
2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: W1209 14:58:44.612767 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ce1d933_c482_444a_86c2_3e9f1315eb16.slice/crio-2c112e6912a124a06bf845414e5ee05e79321e00ac78e96c387ea47179863d8b WatchSource:0}: Error finding container 2c112e6912a124a06bf845414e5ee05e79321e00ac78e96c387ea47179863d8b: Status 404 returned error can't find the container with id 2c112e6912a124a06bf845414e5ee05e79321e00ac78e96c387ea47179863d8b Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.646694 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.702340 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 
14:58:44.702369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.702379 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.702391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.702400 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.804374 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.804406 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.804416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.804430 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.804439 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.906030 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.906069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.906088 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.906102 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:44 crc kubenswrapper[4583]: I1209 14:58:44.906112 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:44Z","lastTransitionTime":"2025-12-09T14:58:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.007694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.007725 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.007734 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.007762 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.007805 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.109126 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.109292 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.109301 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.109314 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.109323 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.180487 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64" exitCode=0 Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.180548 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.182034 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.182061 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.182072 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"2c112e6912a124a06bf845414e5ee05e79321e00ac78e96c387ea47179863d8b"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185861 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185897 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185909 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185917 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185924 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.185932 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} 
Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.191600 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.200455 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.209030 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.212490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.212514 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.212523 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.212533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.212541 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.219391 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.233390 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.241500 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.248831 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.272942 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.294651 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.310944 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.314496 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.314527 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.314540 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.314552 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.314561 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.322850 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.330719 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.339069 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.348336 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.356908 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.365216 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.373619 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.382336 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.404050 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.416793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.416826 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.416835 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.416849 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.416858 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.446276 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.484009 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.518864 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.518894 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.518903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.518915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.518922 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.525475 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.565680 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.610425 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.621000 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.621029 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.621036 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.621049 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.621059 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.645325 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.684115 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.723239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.723273 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.723282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.723294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.723302 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.724684 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.769170 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:45Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.825632 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.825668 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.825676 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.825691 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.825703 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.927276 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.927317 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.927329 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.927344 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:45 crc kubenswrapper[4583]: I1209 14:58:45.927353 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:45Z","lastTransitionTime":"2025-12-09T14:58:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.028815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.028846 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.028855 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.028867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.028876 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.077873 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.077917 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:46 crc kubenswrapper[4583]: E1209 14:58:46.077957 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.077873 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:46 crc kubenswrapper[4583]: E1209 14:58:46.078012 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:46 crc kubenswrapper[4583]: E1209 14:58:46.078083 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.130219 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.130248 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.130256 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.130268 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.130276 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.188802 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11" exitCode=0 Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.188828 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.199344 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.210016 4583 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.218869 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.228733 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.231520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.231541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.231549 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.231560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.231567 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.236852 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.248680 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.255957 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.264202 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.271636 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.279483 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.288022 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672
a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.301572 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.314385 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z 
is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.324609 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:46Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.335357 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.335383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.335392 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.335403 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.335412 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.437038 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.437070 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.437089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.437115 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.437131 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.539520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.539547 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.539555 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.539568 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.539576 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.641173 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.641206 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.641215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.641230 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.641238 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.742592 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.742621 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.742629 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.742642 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.742650 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.844036 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.844084 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.844095 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.844109 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.844120 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.945952 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.945987 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.945996 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.946009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:46 crc kubenswrapper[4583]: I1209 14:58:46.946018 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:46Z","lastTransitionTime":"2025-12-09T14:58:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.047348 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.047376 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.047384 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.047397 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.047406 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.148577 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.148601 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.148609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.148621 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.148629 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.193833 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.195351 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d" exitCode=0 Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.195387 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.205815 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.216669 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.225859 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.234979 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.245356 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.250307 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.250343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.250351 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.250365 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.250376 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.257780 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.265930 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.273678 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.281650 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.288264 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.296609 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.305631 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.317247 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.325288 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.351807 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.351833 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.351841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.351852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc 
kubenswrapper[4583]: I1209 14:58:47.351860 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.395267 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-p54bl"] Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.395552 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.396958 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.397103 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.397156 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.397207 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.403325 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.410332 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.418804 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.427205 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.441124 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.449809 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.454031 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.454060 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.454069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.454089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.454097 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.457543 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.465035 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.476334 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.484213 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.491948 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.499248 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.506385 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.523190 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.552087 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/011d6421-0c34-4f35-9ead-cb4189a28d2a-host\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.552113 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/011d6421-0c34-4f35-9ead-cb4189a28d2a-serviceca\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.552136 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k4pf\" (UniqueName: \"kubernetes.io/projected/011d6421-0c34-4f35-9ead-cb4189a28d2a-kube-api-access-7k4pf\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.556380 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.556412 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.556421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.556436 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.556446 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.566324 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:47Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.652762 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/011d6421-0c34-4f35-9ead-cb4189a28d2a-host\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.652789 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/011d6421-0c34-4f35-9ead-cb4189a28d2a-serviceca\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.652816 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k4pf\" (UniqueName: \"kubernetes.io/projected/011d6421-0c34-4f35-9ead-cb4189a28d2a-kube-api-access-7k4pf\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.652844 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/011d6421-0c34-4f35-9ead-cb4189a28d2a-host\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.654394 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/011d6421-0c34-4f35-9ead-cb4189a28d2a-serviceca\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.657661 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.657689 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.657698 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.657710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.657718 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.667248 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k4pf\" (UniqueName: \"kubernetes.io/projected/011d6421-0c34-4f35-9ead-cb4189a28d2a-kube-api-access-7k4pf\") pod \"node-ca-p54bl\" (UID: \"011d6421-0c34-4f35-9ead-cb4189a28d2a\") " pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.753487 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.753571 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.753628 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:58:55.753615672 +0000 UTC m=+33.961064761 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.753690 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.753757 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:55.753733615 +0000 UTC m=+33.961182704 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.759058 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.759092 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.759100 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.759111 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.759119 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.781373 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-p54bl" Dec 09 14:58:47 crc kubenswrapper[4583]: W1209 14:58:47.795168 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod011d6421_0c34_4f35_9ead_cb4189a28d2a.slice/crio-a32844ea15f4eee25a4b4f34cd469126667d2feed7a86702f22d81fc9bbf1a17 WatchSource:0}: Error finding container a32844ea15f4eee25a4b4f34cd469126667d2feed7a86702f22d81fc9bbf1a17: Status 404 returned error can't find the container with id a32844ea15f4eee25a4b4f34cd469126667d2feed7a86702f22d81fc9bbf1a17 Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.854603 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.854643 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.854664 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 
14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854817 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854846 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854859 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854863 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854904 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:55.854891191 +0000 UTC m=+34.062340279 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854921 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:55.854911318 +0000 UTC m=+34.062360397 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854817 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854939 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854947 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:47 crc kubenswrapper[4583]: E1209 14:58:47.854988 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:55.854982122 +0000 UTC m=+34.062431210 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.860414 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.860435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.860443 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.860454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.860483 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.962289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.962316 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.962324 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.962335 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:47 crc kubenswrapper[4583]: I1209 14:58:47.962344 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:47Z","lastTransitionTime":"2025-12-09T14:58:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.064545 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.064582 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.064594 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.064608 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.064617 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.077788 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.077804 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.077817 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:48 crc kubenswrapper[4583]: E1209 14:58:48.077877 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:48 crc kubenswrapper[4583]: E1209 14:58:48.077941 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:48 crc kubenswrapper[4583]: E1209 14:58:48.078007 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.166104 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.166131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.166139 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.166150 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.166159 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.199223 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-p54bl" event={"ID":"011d6421-0c34-4f35-9ead-cb4189a28d2a","Type":"ContainerStarted","Data":"8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.199252 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-p54bl" event={"ID":"011d6421-0c34-4f35-9ead-cb4189a28d2a","Type":"ContainerStarted","Data":"a32844ea15f4eee25a4b4f34cd469126667d2feed7a86702f22d81fc9bbf1a17"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.202086 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b" exitCode=0 Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.202302 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.208874 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.216851 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.225806 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.252870 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672
a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.267656 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.267681 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.267689 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.267701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.267708 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.279535 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.290312 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.299835 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.311501 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z 
is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.319449 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.327114 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.334939 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.342486 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.350260 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.359422 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.366333 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 
2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.369029 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.369058 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.369068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.369086 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.369095 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.373390 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:
47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.383476 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.391628 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.400348 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.406594 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.425242 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.465678 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672
a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.470951 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.470971 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.470979 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.470990 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.470998 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.509499 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.548780 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z 
is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.572994 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.573023 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.573031 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.573042 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.573050 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.585214 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.623840 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.667033 4583 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a
99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.674995 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.675091 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.675160 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.675229 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.675290 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.705674 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.745527 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.777507 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.777540 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.777548 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.777561 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.777569 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.785619 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:48Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.879683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.879716 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.879724 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.879739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.879770 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.981420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.981449 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.981457 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.981468 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:48 crc kubenswrapper[4583]: I1209 14:58:48.981477 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:48Z","lastTransitionTime":"2025-12-09T14:58:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.083391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.083426 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.083434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.083446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.083454 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.185533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.185560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.185568 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.185579 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.185590 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.209311 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a" exitCode=0 Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.209360 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.214015 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.214240 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.214264 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.223611 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6
7314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.231877 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.232561 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.233139 4583 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.241263 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.249846 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.256263 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.264246 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.273836 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672
a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.287349 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.287373 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.287382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.287394 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.287403 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.288431 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z 
is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.296659 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.305092 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.314155 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.323553 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.333981 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.344740 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.384421 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.389917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.389948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.389957 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.389969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.389977 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.425587 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.464599 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.491640 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.491673 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.491682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.491694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.491702 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.505571 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.544672 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.585129 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.593782 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.593819 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.593829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.593842 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.593850 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.626628 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.670484 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.696345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.696443 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.696501 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.696582 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.696646 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.708964 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.745707 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.784358 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa
2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.798631 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.798663 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.798674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.798688 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.798696 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.826892 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4m
k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.864422 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.900639 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.900668 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.900678 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.900691 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.900698 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:49Z","lastTransitionTime":"2025-12-09T14:58:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.905200 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.944907 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:49 crc kubenswrapper[4583]: I1209 14:58:49.985110 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:49Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.002491 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.002514 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.002522 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.002534 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.002542 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.078509 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:50 crc kubenswrapper[4583]: E1209 14:58:50.078599 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.078648 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.078682 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:50 crc kubenswrapper[4583]: E1209 14:58:50.078765 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:50 crc kubenswrapper[4583]: E1209 14:58:50.078810 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.104235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.104268 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.104277 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.104289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.104301 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.206249 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.206278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.206285 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.206295 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.206303 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.220644 4583 generic.go:334] "Generic (PLEG): container finished" podID="41313e81-1231-4cd0-a9b6-8a9d336704de" containerID="3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101" exitCode=0 Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.220728 4583 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.223309 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerDied","Data":"3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.235230 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.246948 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.256289 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.265248 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.273267 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.280776 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.289897 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.307612 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.308304 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.308332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.308340 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.308357 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.308366 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.348585 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.386667 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.410687 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.410721 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.410732 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.410760 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.410770 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.425838 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.471592 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.506994 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.512779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.512823 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.512836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.512858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.512872 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.551664 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8
b240b526ee5009ee492d2420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.585574 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:50Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.615171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.615213 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.615224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.615240 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.615251 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.716867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.716909 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.716918 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.716934 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.716946 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.818521 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.818876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.818885 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.818899 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.818909 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.920421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.920462 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.920474 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.920490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:50 crc kubenswrapper[4583]: I1209 14:58:50.920501 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:50Z","lastTransitionTime":"2025-12-09T14:58:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.022703 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.022774 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.022785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.022801 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.022810 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.123876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.123908 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.123920 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.123934 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.123948 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225034 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225087 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225100 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225110 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.225869 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" event={"ID":"41313e81-1231-4cd0-a9b6-8a9d336704de","Type":"ContainerStarted","Data":"843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.227173 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/0.log" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.229219 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420" exitCode=1 Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.229247 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.229640 4583 scope.go:117] "RemoveContainer" containerID="b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.238794 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\"
:\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.248107 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.255674 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.264558 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.276719 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672
a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.293437 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.304035 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.312419 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.326932 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.326970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.326994 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.327012 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.327023 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.326964 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\
":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 
14:58:51.337348 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.347803 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.355939 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.365038 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.373691 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.384875 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.393871 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.401861 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.409170 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.418499 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.426965 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.429485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.429517 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.429526 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.429541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.429552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.435676 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.466930 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.514282 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.531652 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.531689 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.531699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.531713 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.531723 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.546854 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.585183 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.626420 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.634086 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.634123 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.634135 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.634149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.634157 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.664393 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.705246 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.736943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.736976 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.736984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.736997 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.737006 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.746712 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.789114 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"message\\\":\\\":58:50.610570 5895 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611153 5895 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611156 5895 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1209 14:58:50.611326 5895 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1209 14:58:50.611356 5895 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1209 14:58:50.611379 5895 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1209 14:58:50.611399 5895 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1209 14:58:50.611417 5895 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1209 14:58:50.611452 5895 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1209 14:58:50.611470 5895 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1209 14:58:50.611500 5895 factory.go:656] Stopping watch factory\\\\nI1209 14:58:50.611526 5895 ovnkube.go:599] Stopped ovnkube\\\\nI1209 14:58:50.611536 5895 handler.go:208] Removed *v1.Node event handler 2\\\\nI1209 14:58:50.611479 5895 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1209 14:58:50.611544 5895 handler.go:208] Removed *v1.EgressFirewall event handler 
9\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:51Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.839035 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.839097 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.839110 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.839126 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.839139 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.940719 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.940770 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.940780 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.940793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:51 crc kubenswrapper[4583]: I1209 14:58:51.940803 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:51Z","lastTransitionTime":"2025-12-09T14:58:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.042084 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.042124 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.042132 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.042149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.042159 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.078203 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.078271 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.078293 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.078202 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.078368 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.078504 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.088824 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.097668 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.104868 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.116725 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.124415 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.132953 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.144468 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.144498 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.144507 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.144520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.144529 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.145958 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.175238 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.195293 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8
b240b526ee5009ee492d2420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"message\\\":\\\":58:50.610570 5895 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611153 5895 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611156 5895 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1209 14:58:50.611326 5895 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1209 14:58:50.611356 5895 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1209 14:58:50.611379 5895 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1209 14:58:50.611399 5895 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1209 14:58:50.611417 5895 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1209 14:58:50.611452 5895 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1209 14:58:50.611470 5895 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1209 14:58:50.611500 5895 factory.go:656] Stopping watch factory\\\\nI1209 14:58:50.611526 5895 ovnkube.go:599] Stopped ovnkube\\\\nI1209 14:58:50.611536 5895 handler.go:208] Removed *v1.Node event handler 2\\\\nI1209 14:58:50.611479 5895 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1209 14:58:50.611544 5895 handler.go:208] Removed *v1.EgressFirewall event handler 
9\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.205482 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d3472024
3b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.224893 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.233300 4583 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/1.log" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.233730 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/0.log" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.235716 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f" exitCode=1 Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.235766 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.235813 4583 scope.go:117] "RemoveContainer" containerID="b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.236213 4583 scope.go:117] "RemoveContainer" containerID="b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.236349 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.246056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.246095 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.246104 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.246117 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.246125 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.266435 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.306276 4583 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.345835 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.348053 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.348091 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.348100 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.348114 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.348123 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.385349 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.427105 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.449606 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.449651 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.449662 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.449680 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.449689 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.465546 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.506568 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 
2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.545201 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.551450 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.551478 
4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.551487 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.551500 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.551510 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.575820 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.575866 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.575878 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.575894 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.575905 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.585010 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.586328 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.587481 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.587513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.587541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.587555 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.587566 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.595449 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.598235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.598260 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.598269 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.598283 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.598291 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.606154 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.608825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.608866 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.608879 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.608903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.608917 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.617119 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.619888 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.619923 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.619934 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.619968 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.619980 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.623984 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.628276 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: E1209 14:58:52.628379 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.653429 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.653455 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.653464 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.653478 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.653505 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.669993 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.705519 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.744933 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.755409 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.755440 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.755449 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.755485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.755496 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.784775 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.824770 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.857386 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.857419 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.857432 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.857448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.857458 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.865319 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.906534 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.949277 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"message\\\":\\\":58:50.610570 5895 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611153 5895 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611156 5895 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1209 14:58:50.611326 5895 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1209 14:58:50.611356 5895 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1209 14:58:50.611379 5895 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1209 14:58:50.611399 5895 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1209 14:58:50.611417 5895 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1209 14:58:50.611452 5895 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1209 14:58:50.611470 5895 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1209 14:58:50.611500 5895 factory.go:656] Stopping watch factory\\\\nI1209 14:58:50.611526 5895 ovnkube.go:599] Stopped ovnkube\\\\nI1209 14:58:50.611536 5895 handler.go:208] Removed *v1.Node event handler 2\\\\nI1209 14:58:50.611479 5895 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1209 14:58:50.611544 5895 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 
stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.959835 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.959870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.959880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.959896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.959906 4583 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:52Z","lastTransitionTime":"2025-12-09T14:58:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:52 crc kubenswrapper[4583]: I1209 14:58:52.984923 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08
287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.065104 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.065140 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.065150 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.065166 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.065179 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.167592 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.167625 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.167635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.167645 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.167653 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.220515 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.234194 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.239867 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/1.log" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.242662 4583 scope.go:117] "RemoveContainer" containerID="b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f" Dec 09 14:58:53 crc 
kubenswrapper[4583]: E1209 14:58:53.242810 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.243534 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.254733 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.263278 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.269284 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.269316 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.269326 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.269343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.269353 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.271642 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.281874 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.291724 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.308938 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b86271fdbd150e5b9a8049c79439767861ff3ae8b240b526ee5009ee492d2420\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"message\\\":\\\":58:50.610570 5895 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611153 5895 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1209 14:58:50.611156 5895 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1209 14:58:50.611326 5895 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1209 14:58:50.611356 5895 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1209 14:58:50.611379 5895 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1209 14:58:50.611399 5895 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1209 14:58:50.611417 5895 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1209 14:58:50.611452 5895 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1209 14:58:50.611470 5895 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1209 14:58:50.611500 5895 factory.go:656] Stopping watch factory\\\\nI1209 14:58:50.611526 5895 ovnkube.go:599] Stopped ovnkube\\\\nI1209 14:58:50.611536 5895 handler.go:208] Removed *v1.Node event handler 2\\\\nI1209 14:58:50.611479 5895 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1209 14:58:50.611544 5895 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 
stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.345919 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.370605 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.370635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.370643 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.370658 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.370668 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.387601 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.425907 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.467146 4583 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.472521 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.472549 4583 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.472557 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.472570 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.472578 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.506461 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/
ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.545785 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.575102 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.575136 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.575145 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.575161 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.575172 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.586938 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.626987 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.665551 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.677154 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.677189 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.677198 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.677215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.677224 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.707731 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.745157 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.779336 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.779369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.779381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.779396 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.779406 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.784219 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.824895 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.866468 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.881389 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.881419 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.881428 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.881440 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.881449 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.907149 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.945475 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.983489 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.983520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.983531 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.983545 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.983555 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:53Z","lastTransitionTime":"2025-12-09T14:58:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:53 crc kubenswrapper[4583]: I1209 14:58:53.987666 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:53Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.026627 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.071236 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.078421 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.078449 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.078421 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:54 crc kubenswrapper[4583]: E1209 14:58:54.078528 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:54 crc kubenswrapper[4583]: E1209 14:58:54.078588 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:54 crc kubenswrapper[4583]: E1209 14:58:54.078633 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.085158 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.085184 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.085192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.085203 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.085212 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.105705 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.148732 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154
edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.185151 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.186460 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.186490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.186498 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.186511 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.186519 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.288622 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.288652 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.288662 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.288677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.288687 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.390375 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.390410 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.390418 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.390432 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.390440 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.492141 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.492254 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.492313 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.492387 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.492438 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.594275 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.594450 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.594518 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.594573 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.594629 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.695971 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.696007 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.696016 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.696030 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.696038 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.798015 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.798055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.798078 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.798091 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.798100 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.816304 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778"] Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.816634 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.818807 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.819669 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.827987 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.837432 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.844887 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.853169 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.863141 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.872886 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"
name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.879977 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.896451 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.899790 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.899830 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.899846 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.899870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.899886 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:54Z","lastTransitionTime":"2025-12-09T14:58:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.905794 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.912070 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.912197 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3a6bc1c9-15f6-4c24-844f-06978e663477-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.912305 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.912381 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kdtr\" (UniqueName: \"kubernetes.io/projected/3a6bc1c9-15f6-4c24-844f-06978e663477-kube-api-access-4kdtr\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.916047 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.925374 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.932839 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.941952 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.951181 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.959497 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:54 crc kubenswrapper[4583]: I1209 14:58:54.973088 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"image
ID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.001592 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.001625 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.001635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.001650 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.001661 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.013458 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.013513 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kdtr\" (UniqueName: \"kubernetes.io/projected/3a6bc1c9-15f6-4c24-844f-06978e663477-kube-api-access-4kdtr\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.013627 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.013663 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3a6bc1c9-15f6-4c24-844f-06978e663477-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.014111 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.014317 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a6bc1c9-15f6-4c24-844f-06978e663477-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.019877 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3a6bc1c9-15f6-4c24-844f-06978e663477-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.027045 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kdtr\" (UniqueName: \"kubernetes.io/projected/3a6bc1c9-15f6-4c24-844f-06978e663477-kube-api-access-4kdtr\") pod \"ovnkube-control-plane-749d76644c-lk778\" (UID: \"3a6bc1c9-15f6-4c24-844f-06978e663477\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.104328 4583 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.104374 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.104385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.104406 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.104430 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.124856 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" Dec 09 14:58:55 crc kubenswrapper[4583]: W1209 14:58:55.134880 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a6bc1c9_15f6_4c24_844f_06978e663477.slice/crio-483a6a18a4615a5d775b2ef84b946115769908b34267f8293a8c26eb3e9d7a7b WatchSource:0}: Error finding container 483a6a18a4615a5d775b2ef84b946115769908b34267f8293a8c26eb3e9d7a7b: Status 404 returned error can't find the container with id 483a6a18a4615a5d775b2ef84b946115769908b34267f8293a8c26eb3e9d7a7b Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.206966 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.207001 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.207011 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.207028 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.207038 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.248291 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" event={"ID":"3a6bc1c9-15f6-4c24-844f-06978e663477","Type":"ContainerStarted","Data":"483a6a18a4615a5d775b2ef84b946115769908b34267f8293a8c26eb3e9d7a7b"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.308481 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.308511 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.308520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.308533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.308543 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.411001 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.411030 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.411040 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.411051 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.411068 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.513041 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.513081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.513090 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.513104 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.513114 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.615133 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.615164 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.615172 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.615183 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.615191 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.716567 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.716596 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.716604 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.716615 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.716623 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.818189 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.818351 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.818365 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.818377 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.818387 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.820665 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.820820 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.820832 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.820817874 +0000 UTC m=+50.028266961 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.820889 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.820933 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.820924656 +0000 UTC m=+50.028373744 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.920480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.920513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.920522 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.920533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.920541 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:55Z","lastTransitionTime":"2025-12-09T14:58:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.921827 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.921868 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:55 crc kubenswrapper[4583]: I1209 14:58:55.921885 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.921976 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.921997 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922005 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:55 crc 
kubenswrapper[4583]: E1209 14:58:55.922012 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922017 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922022 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.921977 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922056 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.922042324 +0000 UTC m=+50.129491412 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922083 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.9220771 +0000 UTC m=+50.129526188 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:58:55 crc kubenswrapper[4583]: E1209 14:58:55.922101 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.922089583 +0000 UTC m=+50.129538671 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.021943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.021968 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.021975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.021987 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.021994 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.078255 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.078277 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.078289 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.078346 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.078412 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.078473 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.123925 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.123953 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.123962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.123972 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.123980 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.208236 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-9j9s6"] Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.208579 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.208623 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.216926 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225387 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225731 4583 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.225810 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.235172 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.241923 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.249644 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.252075 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" event={"ID":"3a6bc1c9-15f6-4c24-844f-06978e663477","Type":"ContainerStarted","Data":"3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.252105 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" event={"ID":"3a6bc1c9-15f6-4c24-844f-06978e663477","Type":"ContainerStarted","Data":"2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.259434 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.267026 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.273109 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.280683 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.288276 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.295869 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.301991 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.309857 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.318860 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.323997 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rldnn\" (UniqueName: \"kubernetes.io/projected/b5848176-adf1-47d0-b043-1b305357d808-kube-api-access-rldnn\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.324049 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 
crc kubenswrapper[4583]: I1209 14:58:56.327960 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.327988 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.327997 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.328008 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.328017 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.331623 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e4911
7b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"set
up\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.343284 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.350388 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.361225 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.369046 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.375909 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.385005 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.391597 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.399596 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.405804 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.417799 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.424475 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rldnn\" (UniqueName: \"kubernetes.io/projected/b5848176-adf1-47d0-b043-1b305357d808-kube-api-access-rldnn\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.424525 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.424632 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.424682 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:56.924667716 +0000 UTC m=+35.132116814 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.426086 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.429921 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.429947 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.429957 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.429969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.429976 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.433863 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.436273 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rldnn\" (UniqueName: \"kubernetes.io/projected/b5848176-adf1-47d0-b043-1b305357d808-kube-api-access-rldnn\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.442979 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.449508 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.458345 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.467630 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.474732 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 
14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.486315 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642
a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.494029 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:58:56Z is after 2025-08-24T17:21:41Z" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.531461 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.531493 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.531506 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.531520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.531530 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.632802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.632835 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.632844 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.632855 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.632864 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.734387 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.734409 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.734417 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.734427 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.734434 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.836232 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.836258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.836265 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.836274 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.836281 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.928133 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.928280 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:56 crc kubenswrapper[4583]: E1209 14:58:56.928330 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:57.928317785 +0000 UTC m=+36.135766874 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.938210 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.938241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.938249 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.938263 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:56 crc kubenswrapper[4583]: I1209 14:58:56.938271 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:56Z","lastTransitionTime":"2025-12-09T14:58:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.039976 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.040004 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.040012 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.040022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.040029 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.141559 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.141587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.141595 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.141609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.141617 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.243169 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.243195 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.243204 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.243220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.243227 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.345189 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.345224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.345234 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.345246 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.345255 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.447420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.447448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.447457 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.447469 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.447477 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.549313 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.549348 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.549357 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.549369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.549378 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.650915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.650948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.650956 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.650967 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.650974 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.752828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.752859 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.752871 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.752883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.752892 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.854614 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.854646 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.854654 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.854666 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.854676 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.936464 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:57 crc kubenswrapper[4583]: E1209 14:58:57.936583 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:57 crc kubenswrapper[4583]: E1209 14:58:57.936626 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:58:59.936614028 +0000 UTC m=+38.144063116 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.955994 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.956022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.956035 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.956050 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:57 crc kubenswrapper[4583]: I1209 14:58:57.956066 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:57Z","lastTransitionTime":"2025-12-09T14:58:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.058026 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.058064 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.058073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.058083 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.058091 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.077908 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.077951 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.077959 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.077911 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:58 crc kubenswrapper[4583]: E1209 14:58:58.077991 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:58:58 crc kubenswrapper[4583]: E1209 14:58:58.078052 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:58:58 crc kubenswrapper[4583]: E1209 14:58:58.078115 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:58:58 crc kubenswrapper[4583]: E1209 14:58:58.078173 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.159905 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.159935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.159943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.159953 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.159962 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.261009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.261049 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.261067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.261079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.261088 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.362499 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.362530 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.362538 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.362551 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.362558 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.464232 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.464263 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.464271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.464282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.464290 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.566475 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.566503 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.566511 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.566544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.566552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.668328 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.668374 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.668383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.668393 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.668399 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.770287 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.770315 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.770323 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.770350 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.770358 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.872138 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.872177 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.872185 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.872197 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.872207 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.973676 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.973714 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.973722 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.973740 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:58 crc kubenswrapper[4583]: I1209 14:58:58.973768 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:58Z","lastTransitionTime":"2025-12-09T14:58:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.075455 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.075488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.075496 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.075532 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.075543 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.177039 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.177078 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.177089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.177100 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.177109 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.278623 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.278680 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.278692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.278705 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.278714 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.380710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.380766 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.380776 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.380787 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.380795 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.482628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.482658 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.482666 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.482677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.482686 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.584820 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.584867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.584876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.584888 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.584896 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.686306 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.686332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.686341 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.686352 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.686360 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.787942 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.787989 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.787999 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.788009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.788016 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.889962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.889993 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.890002 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.890014 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.890022 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.952800 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:58:59 crc kubenswrapper[4583]: E1209 14:58:59.952923 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:59 crc kubenswrapper[4583]: E1209 14:58:59.952967 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:03.952955207 +0000 UTC m=+42.160404305 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.991838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.991868 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.991878 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.991889 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:58:59 crc kubenswrapper[4583]: I1209 14:58:59.991897 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:58:59Z","lastTransitionTime":"2025-12-09T14:58:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.077683 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.077705 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.077716 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:00 crc kubenswrapper[4583]: E1209 14:59:00.077835 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.077892 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:00 crc kubenswrapper[4583]: E1209 14:59:00.078016 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:00 crc kubenswrapper[4583]: E1209 14:59:00.078166 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:00 crc kubenswrapper[4583]: E1209 14:59:00.078178 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.093393 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.093424 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.093437 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.093449 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.093456 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.195811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.195836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.195844 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.195855 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.195862 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.297316 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.297345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.297354 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.297383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.297392 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.398977 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.399005 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.399029 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.399041 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.399049 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.500444 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.500476 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.500484 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.500496 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.500506 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.602238 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.602285 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.602294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.602307 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.602315 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.704156 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.704205 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.704216 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.704227 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.704234 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.805787 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.805817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.805825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.805853 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.805861 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.907402 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.907452 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.907464 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.907475 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:00 crc kubenswrapper[4583]: I1209 14:59:00.907485 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:00Z","lastTransitionTime":"2025-12-09T14:59:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.009028 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.009059 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.009068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.009076 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.009085 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.110347 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.110396 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.110408 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.110421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.110429 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.211485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.211515 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.211524 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.211537 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.211555 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.313435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.313464 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.313473 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.313485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.313508 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.415023 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.415062 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.415072 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.415083 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.415094 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.516694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.516720 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.516728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.516737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.516766 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.618783 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.618810 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.618818 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.618827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.618834 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.720623 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.720653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.720661 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.720671 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.720678 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.822421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.822454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.822463 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.822474 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.822482 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.924251 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.924273 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.924280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.924289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:01 crc kubenswrapper[4583]: I1209 14:59:01.924296 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:01Z","lastTransitionTime":"2025-12-09T14:59:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.025548 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.025568 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.025575 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.025583 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.025596 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.078123 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.078200 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.078284 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.078310 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.078343 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.078427 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.078503 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.078557 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.086177 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.119089 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.126884 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.126924 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.126934 4583 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.126949 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.126959 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.136842 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.146779 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.155444 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.164975 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.170962 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.178403 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.184950 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.192720 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.200790 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.212662 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.220425 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228080 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228092 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228102 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.228164 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.241680 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f
0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default 
network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.248705 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 
14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.256313 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.329969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.329996 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.330005 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.330027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.330035 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.431507 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.431549 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.431558 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.431570 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.431578 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.533622 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.533646 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.533655 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.533664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.533672 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.635377 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.635400 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.635407 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.635416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.635423 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.736636 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.736674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.736683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.736694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.736702 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.838088 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.838106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.838114 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.838125 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.838132 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.844149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.844171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.844179 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.844189 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.844197 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.852320 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.854552 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.854579 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.854589 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.854599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.854606 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.862130 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.864825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.864914 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.864975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.865043 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.865128 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.875193 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.877827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.877858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.877866 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.877895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.877904 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.887197 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.889415 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.889487 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.889549 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.889604 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.889668 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.897674 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:02Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:02 crc kubenswrapper[4583]: E1209 14:59:02.897919 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.939782 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.939832 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.939860 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.939870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:02 crc kubenswrapper[4583]: I1209 14:59:02.939877 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:02Z","lastTransitionTime":"2025-12-09T14:59:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.041769 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.041797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.041805 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.041815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.041822 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.143532 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.143560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.143568 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.143577 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.143585 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.246144 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.246199 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.246210 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.246223 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.246232 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.347648 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.347736 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.347797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.347827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.347841 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.450112 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.450166 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.450176 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.450190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.450200 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.552202 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.552266 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.552278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.552289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.552298 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.654197 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.654246 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.654259 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.654277 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.654289 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.757019 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.757069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.757081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.757092 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.757099 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.859027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.859223 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.859291 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.859370 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.859433 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.961920 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.961954 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.961962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.961975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.961983 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:03Z","lastTransitionTime":"2025-12-09T14:59:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:03 crc kubenswrapper[4583]: I1209 14:59:03.982348 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:03 crc kubenswrapper[4583]: E1209 14:59:03.982496 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:03 crc kubenswrapper[4583]: E1209 14:59:03.982551 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:11.982534489 +0000 UTC m=+50.189983578 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.063316 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.063345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.063376 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.063386 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.063393 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.078212 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.078215 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:04 crc kubenswrapper[4583]: E1209 14:59:04.078454 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.078258 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:04 crc kubenswrapper[4583]: E1209 14:59:04.078518 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.078215 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:04 crc kubenswrapper[4583]: E1209 14:59:04.078374 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:04 crc kubenswrapper[4583]: E1209 14:59:04.078557 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.165956 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.166023 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.166037 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.166067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.166107 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.268873 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.269068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.269096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.269123 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.269139 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.370927 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.370955 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.370963 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.370991 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.370998 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.473381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.473413 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.473423 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.473449 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.473458 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.576026 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.576073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.576083 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.576099 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.576111 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.678907 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.678958 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.678969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.678979 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.678987 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.780861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.780892 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.780901 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.780911 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.780939 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.883187 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.883228 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.883239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.883255 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.883264 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.985504 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.985551 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.985560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.985573 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:04 crc kubenswrapper[4583]: I1209 14:59:04.985582 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:04Z","lastTransitionTime":"2025-12-09T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.087471 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.087589 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.087673 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.087766 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.087839 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.189356 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.189403 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.189413 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.189424 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.189433 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.291079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.291122 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.291131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.291145 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.291154 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.393078 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.393112 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.393122 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.393134 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.393143 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.495288 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.495332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.495342 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.495356 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.495367 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.597595 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.597635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.597644 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.597660 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.597669 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.699773 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.699826 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.699837 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.699848 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.699858 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.801855 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.801898 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.801908 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.801922 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.801932 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.904002 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.904069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.904080 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.904096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:05 crc kubenswrapper[4583]: I1209 14:59:05.904107 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:05Z","lastTransitionTime":"2025-12-09T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.006181 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.006235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.006246 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.006270 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.006283 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.078059 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.078087 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.078092 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:06 crc kubenswrapper[4583]: E1209 14:59:06.078189 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.078235 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:06 crc kubenswrapper[4583]: E1209 14:59:06.078300 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:06 crc kubenswrapper[4583]: E1209 14:59:06.078361 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:06 crc kubenswrapper[4583]: E1209 14:59:06.078434 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.108035 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.108067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.108075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.108085 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.108095 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.210350 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.210373 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.210382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.210391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.210398 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.312503 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.312553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.312563 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.312576 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.312585 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.414702 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.414769 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.414779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.414792 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.414799 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.516692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.516730 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.516759 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.516777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.516786 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.618952 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.618980 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.618988 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.619022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.619031 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.721028 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.721073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.721082 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.721095 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.721105 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.822598 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.822629 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.822639 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.822652 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.822659 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.924358 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.924394 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.924402 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.924414 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:06 crc kubenswrapper[4583]: I1209 14:59:06.924422 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:06Z","lastTransitionTime":"2025-12-09T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.026242 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.026275 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.026283 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.026295 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.026303 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.078358 4583 scope.go:117] "RemoveContainer" containerID="b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.127861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.127893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.127903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.127916 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.127925 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.229890 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.229924 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.229956 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.229970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.229978 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.278585 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/1.log" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.281435 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.281555 4583 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.291341 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.303031 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5d
a91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db52162
8570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.311270 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.322835 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.331500 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.331531 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.331540 4583 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.331556 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.331567 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.336616 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.345113 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.351906 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.360459 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.368087 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.375915 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.382805 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.390840 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.399477 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.412050 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.424268 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.432330 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 
14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.433651 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.433682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.433690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.433703 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.433713 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.457485 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.535626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.535664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.535676 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.535690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.535699 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.637341 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.637380 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.637389 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.637403 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.637412 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.739190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.739230 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.739239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.739254 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.739264 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.840971 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.841003 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.841034 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.841056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.841066 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.945487 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.945531 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.945546 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.945560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:07 crc kubenswrapper[4583]: I1209 14:59:07.945570 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:07Z","lastTransitionTime":"2025-12-09T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.047227 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.047271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.047282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.047297 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.047306 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.077717 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.077781 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.077795 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:08 crc kubenswrapper[4583]: E1209 14:59:08.077874 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.077736 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:08 crc kubenswrapper[4583]: E1209 14:59:08.077991 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:08 crc kubenswrapper[4583]: E1209 14:59:08.078127 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:08 crc kubenswrapper[4583]: E1209 14:59:08.078226 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.149367 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.149396 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.149404 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.149415 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.149424 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.251143 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.251174 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.251182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.251192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.251200 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.284972 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/2.log" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.285535 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/1.log" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.287538 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" exitCode=1 Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.287569 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.287602 4583 scope.go:117] "RemoveContainer" containerID="b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.288255 4583 scope.go:117] "RemoveContainer" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" Dec 09 14:59:08 crc kubenswrapper[4583]: E1209 14:59:08.288462 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.296992 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.304904 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.312834 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.319718 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.328581 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.334948 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.341024 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.349576 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.352613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.352641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.352670 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.352683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.352697 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.361579 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.369333 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.376329 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.383811 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.389934 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.397650 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.409442 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2f882bf6e27c0743e7a4f536cd4a46e80664c2308da6c8f94899a39ac9fe49f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"message\\\":\\\"}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835080 6059 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-apiserver/apiserver\\\\\\\"}\\\\nI1209 14:58:51.835080 6059 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler/scheduler]} name:Service_openshift-kube-scheduler/scheduler_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.169:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1209 14:58:51.835098 6059 services_controller.go:360] Finished syncing service apiserver on namespace openshift-kube-apiserver for network=default : 831.823µs\\\\nI1209 14:58:51.835111 6059 
services_controller.go:356] Processing sync for service openshift-dns/dns-default for network=default\\\\nF1209 14:58:51.835112 6059 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.416308 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.423991 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.454369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.454399 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.454408 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.454421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.454431 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.556273 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.556309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.556320 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.556332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.556343 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.657943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.657971 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.657980 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.657993 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.658001 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.759574 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.759601 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.759609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.759638 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.759647 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.861874 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.861904 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.861915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.861927 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.861936 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.887257 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.964170 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.964197 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.964224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.964234 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:08 crc kubenswrapper[4583]: I1209 14:59:08.964241 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:08Z","lastTransitionTime":"2025-12-09T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.066061 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.066090 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.066099 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.066113 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.066123 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.168013 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.168057 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.168068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.168080 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.168089 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.269246 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.269440 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.269448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.269458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.269467 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.291680 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/2.log" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.294605 4583 scope.go:117] "RemoveContainer" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" Dec 09 14:59:09 crc kubenswrapper[4583]: E1209 14:59:09.294733 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.303666 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.310703 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.319728 4583 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.325792 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.333394 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.340660 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.348365 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.354327 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.362442 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.369908 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.370854 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.370883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.370893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.370905 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.370915 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.378528 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.384865 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.392826 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.401016 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.413137 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.425017 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.431944 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.473172 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.473217 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.473226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.473238 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.473246 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.574729 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.574780 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.574791 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.574804 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.574813 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.676605 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.676633 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.676641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.676653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.676662 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.778728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.778770 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.778779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.778789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.778797 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.880642 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.880667 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.880675 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.880683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.880689 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.982320 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.982345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.982354 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.982365 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:09 crc kubenswrapper[4583]: I1209 14:59:09.982376 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:09Z","lastTransitionTime":"2025-12-09T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.077811 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.077828 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.077860 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.077885 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:10 crc kubenswrapper[4583]: E1209 14:59:10.077891 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:10 crc kubenswrapper[4583]: E1209 14:59:10.077968 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:10 crc kubenswrapper[4583]: E1209 14:59:10.078064 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:10 crc kubenswrapper[4583]: E1209 14:59:10.078117 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.083898 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.083922 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.083930 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.083939 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.083947 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.185137 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.185164 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.185172 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.185182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.185189 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.286973 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.286998 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.287006 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.287016 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.287022 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.388564 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.388591 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.388599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.388637 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.388657 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.490432 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.490457 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.490465 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.490473 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.490480 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.592413 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.592480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.592490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.592499 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.592506 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.693944 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.693972 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.693981 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.693991 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.693999 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.795687 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.795707 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.795715 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.795724 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.795730 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.897841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.897879 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.897888 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.897902 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:10 crc kubenswrapper[4583]: I1209 14:59:10.897911 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:10Z","lastTransitionTime":"2025-12-09T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.000092 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.000141 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.000152 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.000164 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.000172 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.102200 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.102224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.102233 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.102241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.102280 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.204069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.204112 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.204121 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.204138 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.204149 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.306142 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.306179 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.306188 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.306201 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.306212 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.408202 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.408243 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.408253 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.408267 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.408277 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.509565 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.509591 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.509599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.509611 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.509621 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.611690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.611720 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.611728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.611739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.611766 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.713922 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.713959 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.713969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.713982 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.713991 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.816444 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.816477 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.816487 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.816498 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.816506 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.846707 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.846829 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 14:59:43.846807151 +0000 UTC m=+82.054256249 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.846958 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.847066 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.847112 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:43.847104112 +0000 UTC m=+82.054553210 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.918226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.918258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.918267 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.918278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.918286 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:11Z","lastTransitionTime":"2025-12-09T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.947664 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.947702 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:11 crc kubenswrapper[4583]: I1209 14:59:11.947721 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947804 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947828 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947840 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947848 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947858 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947863 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947906 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947892 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl 
podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:43.947878252 +0000 UTC m=+82.155327350 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947948 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:43.947937324 +0000 UTC m=+82.155386422 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:59:11 crc kubenswrapper[4583]: E1209 14:59:11.947961 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:43.947954606 +0000 UTC m=+82.155403694 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.019458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.019485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.019493 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.019502 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.019509 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.048375 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.048457 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.048495 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 14:59:28.048484935 +0000 UTC m=+66.255934023 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.077998 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.078031 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.078112 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.078157 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.078176 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.078246 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.078425 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:12 crc kubenswrapper[4583]: E1209 14:59:12.078505 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.087024 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.096129 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.108050 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.116225 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.120710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.120739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.120762 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.120772 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.120779 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.123562 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.131606 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.137833 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.149583 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.156832 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.166784 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.174546 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.182049 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.189364 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.195870 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.204675 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.210836 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.217017 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.222154 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.222175 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.222185 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.222198 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.222207 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.324160 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.324220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.324231 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.324244 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.324253 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.427062 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.427089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.427097 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.427110 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.427119 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.528893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.528925 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.528933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.528945 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.528955 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.630836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.630871 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.630880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.630893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.630902 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.732635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.732806 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.732876 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.732940 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.733002 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.834454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.834497 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.834508 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.834523 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.834533 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.936543 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.936575 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.936584 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.936626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:12 crc kubenswrapper[4583]: I1209 14:59:12.936643 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:12Z","lastTransitionTime":"2025-12-09T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.038721 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.038769 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.038784 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.038795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.038804 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.140269 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.140293 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.140303 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.140311 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.140318 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.241366 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.241385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.241392 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.241401 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.241407 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.262965 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.262984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.262991 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.262999 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.263006 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.271333 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.273694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.273727 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.273736 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.273763 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.273772 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.282416 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.284679 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.284704 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.284713 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.284723 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.284730 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.293839 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.296455 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.296491 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.296501 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.296513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.296521 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.304707 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.306908 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.306935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.306943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.306954 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.306960 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.314668 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:13 crc kubenswrapper[4583]: E1209 14:59:13.314782 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.342761 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.342794 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.342803 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.342814 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.342823 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.444491 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.444513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.444522 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.444532 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.444542 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.546111 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.546142 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.546151 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.546162 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.546170 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.647538 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.647576 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.647587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.647601 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.647610 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.749112 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.749141 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.749149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.749160 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.749168 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.851360 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.851398 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.851408 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.851420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.851430 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.953254 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.953289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.953298 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.953311 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:13 crc kubenswrapper[4583]: I1209 14:59:13.953320 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:13Z","lastTransitionTime":"2025-12-09T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.055257 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.055290 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.055299 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.055310 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.055318 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.077880 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:14 crc kubenswrapper[4583]: E1209 14:59:14.077968 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.078012 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.078076 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:14 crc kubenswrapper[4583]: E1209 14:59:14.078118 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.078165 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:14 crc kubenswrapper[4583]: E1209 14:59:14.078246 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:14 crc kubenswrapper[4583]: E1209 14:59:14.078338 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.157342 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.157371 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.157381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.157393 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.157402 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.259342 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.259374 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.259382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.259394 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.259402 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.361348 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.361381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.361389 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.361401 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.361410 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.463271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.463299 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.463307 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.463318 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.463326 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.565209 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.565241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.565252 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.565265 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.565274 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.667096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.667131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.667140 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.667153 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.667163 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.768469 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.768494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.768502 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.768512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.768521 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.870148 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.870268 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.870345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.870411 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.870469 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.971696 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.971734 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.971766 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.971777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:14 crc kubenswrapper[4583]: I1209 14:59:14.971785 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:14Z","lastTransitionTime":"2025-12-09T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.073915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.073952 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.073962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.073970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.073978 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.175700 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.175831 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.175910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.175963 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.176018 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.277148 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.277173 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.277183 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.277194 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.277203 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.378444 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.378479 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.378490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.378501 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.378511 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479484 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479515 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479524 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479535 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479544 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.479665 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.485785 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.491014 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.498539 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.506258 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.513516 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.521056 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.528307 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.534540 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.541369 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.548662 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.554506 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.562234 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.570116 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.581345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.581394 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.581415 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.581431 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.581440 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.582807 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.591619 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.602764 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.609726 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.617214 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.683571 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.683611 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.683620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.683635 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.683643 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.785349 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.785379 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.785390 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.785402 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.785411 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.886848 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.886936 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.886993 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.887081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.887136 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.988462 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.988499 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.988512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.988527 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:15 crc kubenswrapper[4583]: I1209 14:59:15.988535 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:15Z","lastTransitionTime":"2025-12-09T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.078141 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.078181 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:16 crc kubenswrapper[4583]: E1209 14:59:16.078276 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.078316 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.078348 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:16 crc kubenswrapper[4583]: E1209 14:59:16.078445 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:16 crc kubenswrapper[4583]: E1209 14:59:16.078544 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:16 crc kubenswrapper[4583]: E1209 14:59:16.078578 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.090282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.090310 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.090319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.090328 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.090337 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.191741 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.191792 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.191801 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.191811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.191818 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.293868 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.293896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.293904 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.293913 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.293921 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.395734 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.395775 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.395785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.395796 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.395803 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.497619 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.497664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.497674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.497685 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.497693 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.599682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.599710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.599718 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.599727 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.599734 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.701412 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.701438 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.701448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.701458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.701464 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.803513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.803574 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.803585 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.803595 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.803602 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.904695 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.904730 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.904765 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.904778 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:16 crc kubenswrapper[4583]: I1209 14:59:16.904787 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:16Z","lastTransitionTime":"2025-12-09T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.006672 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.006699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.006706 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.006716 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.006724 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.107795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.107816 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.107823 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.107832 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.107840 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.209789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.209817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.209825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.209838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.209845 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.311816 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.311850 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.311860 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.311872 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.311881 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.413627 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.413653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.413663 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.413673 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.413681 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.515659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.515685 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.515694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.515704 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.515711 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.617656 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.617682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.617690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.617699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.617706 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.719733 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.719777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.719786 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.719795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.719802 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.821433 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.821467 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.821475 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.821488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.821498 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.923010 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.923050 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.923060 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.923071 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:17 crc kubenswrapper[4583]: I1209 14:59:17.923079 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:17Z","lastTransitionTime":"2025-12-09T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.025278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.025317 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.025330 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.025343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.025354 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.077836 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.077888 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:18 crc kubenswrapper[4583]: E1209 14:59:18.077916 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:18 crc kubenswrapper[4583]: E1209 14:59:18.077980 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.078016 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.078041 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:18 crc kubenswrapper[4583]: E1209 14:59:18.078091 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:18 crc kubenswrapper[4583]: E1209 14:59:18.078172 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.126969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.126991 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.127000 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.127009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.127016 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.228790 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.228819 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.228828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.228838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.228845 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.330299 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.330324 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.330332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.330341 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.330348 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.434636 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.434670 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.434680 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.434692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.434704 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.536583 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.536612 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.536622 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.536632 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.536639 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.638587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.638613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.638621 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.638630 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.638636 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.740387 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.740416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.740425 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.740435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.740442 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.841543 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.841571 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.841579 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.841591 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.841599 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.943245 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.943266 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.943274 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.943284 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:18 crc kubenswrapper[4583]: I1209 14:59:18.943291 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:18Z","lastTransitionTime":"2025-12-09T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.045171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.045198 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.045208 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.045220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.045229 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.146599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.146626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.146637 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.146649 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.146657 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.248461 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.248495 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.248506 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.248519 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.248528 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.350315 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.350347 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.350357 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.350369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.350377 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.452019 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.452063 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.452075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.452088 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.452097 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.554258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.554305 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.554320 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.554335 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.554347 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.655778 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.655807 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.655815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.655826 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.655834 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.757323 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.757353 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.757362 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.757372 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.757379 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.859057 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.859091 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.859099 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.859111 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.859119 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.960428 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.960456 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.960463 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.960472 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:19 crc kubenswrapper[4583]: I1209 14:59:19.960479 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:19Z","lastTransitionTime":"2025-12-09T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.061501 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.061528 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.061538 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.061547 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.061555 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.077970 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.078016 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.078067 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:20 crc kubenswrapper[4583]: E1209 14:59:20.078175 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.078191 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:20 crc kubenswrapper[4583]: E1209 14:59:20.078276 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:20 crc kubenswrapper[4583]: E1209 14:59:20.078328 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:20 crc kubenswrapper[4583]: E1209 14:59:20.078388 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.163238 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.163263 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.163271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.163280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.163287 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.265045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.265064 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.265071 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.265081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.265089 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.366776 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.366795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.366802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.366811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.366819 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.468211 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.468234 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.468242 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.468251 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.468259 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.569272 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.569303 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.569312 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.569323 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.569333 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.670702 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.670726 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.670733 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.670766 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.670775 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.772213 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.772259 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.772274 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.772291 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.772304 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.874087 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.874112 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.874120 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.874128 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.874136 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.975282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.975306 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.975314 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.975323 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:20 crc kubenswrapper[4583]: I1209 14:59:20.975330 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:20Z","lastTransitionTime":"2025-12-09T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.076438 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.076477 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.076485 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.076499 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.076507 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.177628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.177660 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.177668 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.177679 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.177688 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.279960 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.279990 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.279997 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.280007 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.280044 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.381783 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.381809 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.381818 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.381829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.381837 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.483381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.483405 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.483413 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.483424 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.483432 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.584600 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.584631 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.584659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.584670 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.584678 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.686283 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.686318 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.686328 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.686340 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.686349 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.787767 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.787793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.787802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.787811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.787827 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.889182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.889215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.889224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.889237 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.889246 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.990482 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.990512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.990521 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.990531 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:21 crc kubenswrapper[4583]: I1209 14:59:21.990539 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:21Z","lastTransitionTime":"2025-12-09T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.077823 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.077869 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:22 crc kubenswrapper[4583]: E1209 14:59:22.077904 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.077915 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:22 crc kubenswrapper[4583]: E1209 14:59:22.077979 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.078004 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:22 crc kubenswrapper[4583]: E1209 14:59:22.078071 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:22 crc kubenswrapper[4583]: E1209 14:59:22.078117 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.078590 4583 scope.go:117] "RemoveContainer" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" Dec 09 14:59:22 crc kubenswrapper[4583]: E1209 14:59:22.078794 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.090428 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.091639 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.091670 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc 
kubenswrapper[4583]: I1209 14:59:22.091680 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.091691 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.091698 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.097828 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.106226 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.113620 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.120837 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.127637 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.133530 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.140246 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.147514 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.153798 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.161486 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.170458 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.177451 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.189681 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.192976 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.193003 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.193012 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.193032 4583 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.193040 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.198405 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.209719 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.216406 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.223600 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.294684 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.294767 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.294778 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.294788 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.294975 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.396623 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.396683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.396697 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.396709 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.396718 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.498870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.499034 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.499043 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.499057 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.499065 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.601239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.601294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.601306 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.601319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.601326 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.702779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.702811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.702819 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.702831 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.702839 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.804342 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.804373 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.804382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.804393 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.804401 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.906124 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.906151 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.906159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.906171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:22 crc kubenswrapper[4583]: I1209 14:59:22.906178 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:22Z","lastTransitionTime":"2025-12-09T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.008120 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.008149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.008158 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.008167 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.008175 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.109503 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.109533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.109542 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.109570 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.109579 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.211407 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.211439 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.211448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.211459 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.211467 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.312838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.312872 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.312881 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.312895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.312903 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.415178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.415206 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.415215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.415226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.415235 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.479937 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.479968 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.479977 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.479990 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.479999 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.488228 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:23Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.490560 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.490589 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.490598 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.490609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.490617 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.498737 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:23Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.500713 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.500774 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.500785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.500797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.500805 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.508673 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:23Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.510712 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.510741 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.510761 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.510772 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.510779 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.518332 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:23Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.520403 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.520427 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.520437 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.520446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.520454 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.527872 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:23Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:23 crc kubenswrapper[4583]: E1209 14:59:23.527970 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.528726 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.528764 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.528773 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.528785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.528792 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.629894 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.629919 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.629928 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.629937 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.629951 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.731739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.731794 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.731802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.731814 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.731823 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.833692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.833723 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.833733 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.833759 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.833768 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.935430 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.935458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.935466 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.935480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:23 crc kubenswrapper[4583]: I1209 14:59:23.935489 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:23Z","lastTransitionTime":"2025-12-09T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.037504 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.037541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.037551 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.037564 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.037573 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.078150 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.078192 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:24 crc kubenswrapper[4583]: E1209 14:59:24.078236 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.078273 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.078338 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:24 crc kubenswrapper[4583]: E1209 14:59:24.078346 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:24 crc kubenswrapper[4583]: E1209 14:59:24.078394 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:24 crc kubenswrapper[4583]: E1209 14:59:24.078446 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.138984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.139027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.139037 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.139050 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.139058 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.241078 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.241108 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.241116 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.241127 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.241135 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.342580 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.342611 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.342620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.342630 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.342639 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.444468 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.444520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.444529 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.444543 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.444552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.546247 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.546271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.546280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.546293 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.546301 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.648183 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.648210 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.648218 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.648229 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.648236 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.749870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.749896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.749904 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.749915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.749923 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.851890 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.851925 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.851935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.851948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.851958 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.953796 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.953831 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.953840 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.953852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:24 crc kubenswrapper[4583]: I1209 14:59:24.953860 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:24Z","lastTransitionTime":"2025-12-09T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.055604 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.055633 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.055642 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.055653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.055663 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.158026 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.158052 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.158061 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.158072 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.158081 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.259813 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.259850 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.259861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.259875 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.259885 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.361452 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.361499 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.361507 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.361520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.361529 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.463310 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.463337 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.463345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.463356 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.463364 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.565677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.565710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.565719 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.565731 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.565741 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.667370 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.667415 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.667431 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.667441 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.667447 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.768589 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.768622 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.768631 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.768642 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.768650 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.870434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.870456 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.870464 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.870496 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.870504 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.972096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.972145 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.972154 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.972164 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:25 crc kubenswrapper[4583]: I1209 14:59:25.972172 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:25Z","lastTransitionTime":"2025-12-09T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.073772 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.073805 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.073815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.073827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.073836 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.078044 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.078082 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.078089 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.078083 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:26 crc kubenswrapper[4583]: E1209 14:59:26.078129 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:26 crc kubenswrapper[4583]: E1209 14:59:26.078176 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:26 crc kubenswrapper[4583]: E1209 14:59:26.078259 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:26 crc kubenswrapper[4583]: E1209 14:59:26.078343 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.175970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.176110 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.176168 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.176243 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.176299 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.277793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.277830 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.277839 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.277852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.277862 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.380089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.380125 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.380133 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.380149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.380165 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.482183 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.482212 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.482220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.482231 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.482239 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.583397 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.583428 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.583437 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.583450 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.583458 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.685626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.685667 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.685677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.685688 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.685696 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.787722 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.787781 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.787790 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.787802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.787812 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.889482 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.889512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.889521 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.889534 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.889542 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.991258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.991288 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.991297 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.991309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:26 crc kubenswrapper[4583]: I1209 14:59:26.991320 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:26Z","lastTransitionTime":"2025-12-09T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.092681 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.092718 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.092728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.092740 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.092789 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.194151 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.194176 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.194185 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.194195 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.194202 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.295910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.295954 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.295964 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.295975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.295981 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.397343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.397369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.397378 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.397386 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.397392 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.498626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.498648 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.498656 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.498665 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.498673 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.599989 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.600046 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.600055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.600068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.600082 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.701609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.701645 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.701653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.701666 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.701674 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.802718 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.802780 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.802791 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.802804 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.802814 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.904301 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.904338 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.904347 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.904358 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:27 crc kubenswrapper[4583]: I1209 14:59:27.904367 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:27Z","lastTransitionTime":"2025-12-09T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.005800 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.005821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.005829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.005841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.005848 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.073528 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.073651 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.073693 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 15:00:00.073679698 +0000 UTC m=+98.281128796 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.077973 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.078034 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.078076 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.078076 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.078135 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.078178 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.078296 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:28 crc kubenswrapper[4583]: E1209 14:59:28.078350 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.106975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.107030 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.107039 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.107051 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.107059 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.208735 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.208771 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.208779 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.208789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.208797 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.310349 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.310378 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.310404 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.310421 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.310430 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.412301 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.412357 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.412366 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.412379 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.412388 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.514309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.514337 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.514346 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.514356 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.514364 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.616190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.616217 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.616226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.616237 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.616246 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.718658 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.718690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.718701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.718713 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.718720 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.820661 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.820690 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.820701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.820715 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.820724 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.922506 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.922544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.922554 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.922571 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:28 crc kubenswrapper[4583]: I1209 14:59:28.922584 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:28Z","lastTransitionTime":"2025-12-09T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.024669 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.024706 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.024715 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.024729 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.024758 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.126491 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.126530 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.126540 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.126553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.126562 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.228203 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.228241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.228250 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.228264 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.228277 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.330380 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.330424 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.330433 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.330445 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.330456 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.333967 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/0.log" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.334018 4583 generic.go:334] "Generic (PLEG): container finished" podID="c240c9e3-573f-4237-b433-deb0f21e0123" containerID="1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076" exitCode=1 Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.334040 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerDied","Data":"1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.334289 4583 scope.go:117] "RemoveContainer" containerID="1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.343921 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.354181 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.361217 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.370041 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.379021 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.387103 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.399982 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.408633 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.416291 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.427527 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.432278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.432378 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.432462 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.432628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.432794 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.434488 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.442094 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.448916 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.457040 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.464691 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.472125 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.479796 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.488768 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:29Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.534825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.534858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.534870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.534884 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.534894 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.636572 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.636599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.636607 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.636620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.636630 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.738225 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.738295 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.738309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.738327 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.738339 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.839906 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.839933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.839941 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.839953 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.839962 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.942159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.942182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.942190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.942200 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:29 crc kubenswrapper[4583]: I1209 14:59:29.942207 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:29Z","lastTransitionTime":"2025-12-09T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.043960 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.044002 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.044019 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.044031 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.044038 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.078223 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:30 crc kubenswrapper[4583]: E1209 14:59:30.078297 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.078332 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.078425 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:30 crc kubenswrapper[4583]: E1209 14:59:30.078451 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.078453 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:30 crc kubenswrapper[4583]: E1209 14:59:30.078654 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:30 crc kubenswrapper[4583]: E1209 14:59:30.078546 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.145448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.145553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.145618 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.145684 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.145776 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.247120 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.247143 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.247150 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.247160 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.247167 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.337657 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/0.log" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.337693 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerStarted","Data":"ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360806 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360845 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360866 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.360919 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.373983 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.384201 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.392934 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.401308 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.409122 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.416550 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.425841 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.433055 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.441608 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.448250 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.456290 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.463252 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.463283 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.463292 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.463305 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.463315 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.464650 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-
09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.471966 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.486231 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.494483 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.507270 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.516145 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:30Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.565668 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.565697 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.565706 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.565720 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.565728 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.667178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.667213 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.667224 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.667240 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.667250 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.768494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.768556 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.768566 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.768577 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.768585 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.870192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.870230 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.870239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.870252 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.870262 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.971569 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.971590 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.971597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.971609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:30 crc kubenswrapper[4583]: I1209 14:59:30.971617 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:30Z","lastTransitionTime":"2025-12-09T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.073227 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.073250 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.073258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.073267 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.073275 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.175121 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.175149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.175157 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.175169 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.175176 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.277033 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.277069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.277077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.277090 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.277099 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.378613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.378648 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.378659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.378673 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.378681 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.479777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.479803 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.479811 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.479821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.479829 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.581996 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.582036 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.582045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.582054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.582061 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.683450 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.683480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.683488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.683497 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.683505 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.785525 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.785564 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.785576 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.785590 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.785601 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.887169 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.887203 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.887212 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.887228 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.887236 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.988850 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.988884 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.988895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.988907 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:31 crc kubenswrapper[4583]: I1209 14:59:31.988918 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:31Z","lastTransitionTime":"2025-12-09T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.078591 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.078606 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.078642 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.078675 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:32 crc kubenswrapper[4583]: E1209 14:59:32.078793 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:32 crc kubenswrapper[4583]: E1209 14:59:32.078865 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:32 crc kubenswrapper[4583]: E1209 14:59:32.078954 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:32 crc kubenswrapper[4583]: E1209 14:59:32.079014 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.087996 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.090135 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.090165 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.090174 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.090187 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.090196 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.097161 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.104979 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.112490 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.120579 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.131493 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.138313 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.144355 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.151247 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.162995 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.172524 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.179726 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.191936 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.191967 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.191977 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.191989 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.191997 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.193438 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.201737 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.210438 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.218274 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.229950 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e2
15d977f6824d67cfda5ddb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.237157 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.293363 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.293398 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.293410 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.293427 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.293437 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.396919 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.397190 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.397202 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.397232 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.397245 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.499492 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.499532 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.499542 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.499557 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.499570 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.601839 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.601890 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.601901 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.601917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.601928 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.703613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.703675 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.703691 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.703701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.703708 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.805693 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.805731 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.805770 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.805785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.805795 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.907389 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.907454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.907470 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.907493 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:32 crc kubenswrapper[4583]: I1209 14:59:32.907512 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:32Z","lastTransitionTime":"2025-12-09T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.009442 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.009472 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.009482 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.009492 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.009501 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.111567 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.111587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.111597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.111608 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.111615 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.213139 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.213164 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.213192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.213201 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.213211 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.314893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.314917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.314926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.314935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.314942 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.417003 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.417045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.417058 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.417072 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.417084 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.518987 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.519014 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.519024 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.519054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.519062 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.620046 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.620063 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.620070 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.620080 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.620088 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.721238 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.721256 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.721263 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.721275 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.721282 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.755706 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.755739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.755780 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.755794 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.755807 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.765985 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.771603 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.771633 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.771642 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.771656 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.771664 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.785956 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.788279 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.788325 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.788335 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.788346 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.788354 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.797346 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.799734 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.799773 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.799783 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.799793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.799800 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.808277 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.810391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.810414 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.810423 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.810433 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.810440 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.818635 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:33 crc kubenswrapper[4583]: E1209 14:59:33.818759 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.822404 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.822427 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.822437 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.822447 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.822455 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.923822 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.923853 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.923865 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.923895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:33 crc kubenswrapper[4583]: I1209 14:59:33.923904 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:33Z","lastTransitionTime":"2025-12-09T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.025877 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.025906 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.025916 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.025926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.025933 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.078454 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.078482 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:34 crc kubenswrapper[4583]: E1209 14:59:34.078547 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.078579 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.078585 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:34 crc kubenswrapper[4583]: E1209 14:59:34.078665 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:34 crc kubenswrapper[4583]: E1209 14:59:34.078704 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:34 crc kubenswrapper[4583]: E1209 14:59:34.078792 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.079230 4583 scope.go:117] "RemoveContainer" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.127289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.127322 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.127333 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.127347 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.127357 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.229178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.229206 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.229214 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.229226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.229235 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.331382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.331429 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.331438 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.331454 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.331464 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.348339 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/2.log" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.350300 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.350716 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.367304 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.381653 4583 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.389854 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.399920 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.408362 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.416134 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.423813 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.432823 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.432998 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.433033 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.433043 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.433064 4583 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.433073 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.440447 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.449210 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.456294 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.465886 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.475198 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.484288 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.498320 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.516262 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.524861 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 
14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.534701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.534823 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.534888 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.534961 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.535019 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.537300 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:34Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.636591 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.636625 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.636634 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.636648 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.636655 4583 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.737987 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.738191 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.738269 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.738330 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.738392 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.840585 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.840633 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.840643 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.840658 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.840668 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.942386 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.942436 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.942446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.942463 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:34 crc kubenswrapper[4583]: I1209 14:59:34.942475 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:34Z","lastTransitionTime":"2025-12-09T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.044456 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.044935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.044998 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.045069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.045126 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.147309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.147335 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.147343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.147353 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.147360 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.251711 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.251762 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.251774 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.251786 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.251794 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.352844 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.352874 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.352883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.352893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.352901 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.354280 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/3.log" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.354802 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/2.log" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.356785 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" exitCode=1 Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.356815 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.356842 4583 scope.go:117] "RemoveContainer" containerID="3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.357296 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 14:59:35 crc kubenswrapper[4583]: E1209 14:59:35.357443 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.368294 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.376648 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa
2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.386474 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.393598 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.401635 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.409439 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.416697 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.423724 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.431660 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.440149 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.447716 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.454143 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.455054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.455097 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.455106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.455122 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.455132 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.462130 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.470604 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.478497 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.491282 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.503876 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3218d3109417d2fc054c4e4ed3b74ef47ac634e215d977f6824d67cfda5ddb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:07Z\\\",\\\"message\\\":\\\"s/ovnkube-control-plane-749d76644c-lk778 openshift-etcd/etcd-crc openshift-kube-apiserver/kube-apiserver-crc openshift-multus/network-metrics-daemon-9j9s6 openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-jmtvl openshift-multus/multus-additional-cni-plugins-2vxtt openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nF1209 14:59:07.670100 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:07Z is after 2025-08-24T17:21:41Z]\\\\nI1209 14:59:07.670115 6302 obj_retry.go:303] Retry \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:34Z\\\",\\\"message\\\":\\\"\\\\nI1209 14:59:34.703590 6727 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1209 14:59:34.703593 6727 
transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-controller-manager/kube-controller-manager]} name:Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ba175bbe-5cc4-47e6-a32d-57693e1320bd}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1209 14:59:34.703627 6727 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Inte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.511151 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 09 
14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.556793 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.556821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.556832 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.556844 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.556851 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.658736 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.658777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.658787 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.658800 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.658808 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.760712 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.760739 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.760764 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.760781 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.760788 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.862569 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.862606 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.862615 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.862628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.862636 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.964572 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.964602 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.964611 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.964623 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:35 crc kubenswrapper[4583]: I1209 14:59:35.964632 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:35Z","lastTransitionTime":"2025-12-09T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.066282 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.066309 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.066316 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.066326 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.066333 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.078576 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.078596 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.078622 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:36 crc kubenswrapper[4583]: E1209 14:59:36.078657 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.078676 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:36 crc kubenswrapper[4583]: E1209 14:59:36.078797 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:36 crc kubenswrapper[4583]: E1209 14:59:36.078848 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:36 crc kubenswrapper[4583]: E1209 14:59:36.078902 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.168103 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.168140 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.168149 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.168162 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.168171 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.269547 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.269576 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.269585 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.269597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.269605 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.360380 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/3.log" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.362951 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 14:59:36 crc kubenswrapper[4583]: E1209 14:59:36.363102 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.370641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.370668 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.370677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.370688 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.370696 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.371628 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.379431 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.385671 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.393561 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.401617 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.408479 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.419763 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.427151 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.438804 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b
5095b68cb665207134108e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:34Z\\\",\\\"message\\\":\\\"\\\\nI1209 14:59:34.703590 6727 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1209 14:59:34.703593 6727 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-controller-manager/kube-controller-manager]} name:Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ba175bbe-5cc4-47e6-a32d-57693e1320bd}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1209 14:59:34.703627 6727 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Inte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.445548 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.453001 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.461408 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65
c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.467936 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.472468 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.472498 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.472506 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.472521 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.472529 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.475319 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.482613 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.489606 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.496066 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.501843 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:36Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.574542 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.574566 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.574574 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.574585 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.574592 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.676300 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.676327 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.676337 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.676347 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.676371 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.778086 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.778113 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.778122 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.778131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.778139 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.880032 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.880059 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.880067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.880102 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.880110 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.982026 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.982054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.982062 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.982085 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:36 crc kubenswrapper[4583]: I1209 14:59:36.982092 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:36Z","lastTransitionTime":"2025-12-09T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.084128 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.084156 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.084165 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.084201 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.084215 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.186009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.186045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.186055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.186068 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.186086 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.287784 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.287809 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.287817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.287828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.287836 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.390250 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.390289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.390300 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.390312 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.390322 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.492290 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.492327 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.492337 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.492350 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.492360 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.593975 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.594003 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.594013 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.594023 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.594030 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.696290 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.696324 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.696333 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.696345 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.696353 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.798204 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.798241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.798250 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.798262 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.798271 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.900021 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.900056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.900067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.900078 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:37 crc kubenswrapper[4583]: I1209 14:59:37.900107 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:37Z","lastTransitionTime":"2025-12-09T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.001912 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.002006 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.002075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.002159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.002224 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.078490 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:38 crc kubenswrapper[4583]: E1209 14:59:38.078574 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.078591 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.078612 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:38 crc kubenswrapper[4583]: E1209 14:59:38.078879 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.078665 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:38 crc kubenswrapper[4583]: E1209 14:59:38.078984 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:38 crc kubenswrapper[4583]: E1209 14:59:38.079070 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.103605 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.103632 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.103641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.103653 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.103661 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.205057 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.205122 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.205131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.205139 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.205146 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.306833 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.306865 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.306873 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.306885 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.306893 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.408795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.408828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.408838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.408851 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.408860 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.510846 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.510883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.510895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.510909 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.510918 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.612385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.612418 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.612431 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.612442 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.612451 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.713981 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.714007 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.714016 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.714034 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.714042 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.815244 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.815366 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.815452 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.815520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.815585 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.917346 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.917371 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.917379 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.917390 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:38 crc kubenswrapper[4583]: I1209 14:59:38.917397 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:38Z","lastTransitionTime":"2025-12-09T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.019504 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.019528 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.019537 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.019546 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.019552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.120789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.120817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.120825 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.120836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.120844 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.222453 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.222507 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.222518 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.222532 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.222542 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.323789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.323821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.323832 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.323845 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.323853 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.425033 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.425059 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.425067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.425077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.425087 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.526935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.526969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.526979 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.526993 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.527002 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.628336 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.628380 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.628391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.628408 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.628419 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.730789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.730836 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.730852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.730869 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.730882 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.832587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.832612 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.832621 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.832631 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.832638 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.934217 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.934259 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.934267 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.934279 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:39 crc kubenswrapper[4583]: I1209 14:59:39.934287 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:39Z","lastTransitionTime":"2025-12-09T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.036235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.036263 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.036271 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.036281 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.036289 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.078210 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.078241 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:40 crc kubenswrapper[4583]: E1209 14:59:40.078313 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.078345 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.078353 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:40 crc kubenswrapper[4583]: E1209 14:59:40.078429 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:40 crc kubenswrapper[4583]: E1209 14:59:40.078480 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:40 crc kubenswrapper[4583]: E1209 14:59:40.078545 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.137883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.137911 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.137919 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.137931 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.137940 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.239737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.239783 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.239870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.239885 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.239893 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.341763 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.341790 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.341798 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.341809 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.341818 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.443213 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.443242 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.443253 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.443264 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.443271 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.544901 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.544927 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.544935 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.544944 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.544950 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.646871 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.646903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.646913 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.646939 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.646951 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.748722 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.748765 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.748775 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.748785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.748792 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.850556 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.850586 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.850596 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.850607 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.850616 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.952382 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.952533 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.952544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.952554 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:40 crc kubenswrapper[4583]: I1209 14:59:40.952561 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:40Z","lastTransitionTime":"2025-12-09T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.054418 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.054641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.054651 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.054664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.054674 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.156340 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.156372 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.156383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.156395 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.156403 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.257803 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.257830 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.257839 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.257849 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.257857 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.359772 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.359817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.359826 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.359839 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.359846 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.461256 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.461284 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.461292 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.461304 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.461312 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.563116 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.563156 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.563165 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.563175 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.563182 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.665101 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.665146 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.665156 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.665168 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.665177 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.766508 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.766530 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.766538 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.766549 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.766558 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.867999 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.868027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.868037 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.868047 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.868064 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.969808 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.969829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.969837 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.969848 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:41 crc kubenswrapper[4583]: I1209 14:59:41.969855 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:41Z","lastTransitionTime":"2025-12-09T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.071079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.071125 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.071143 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.071155 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.071162 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.077872 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.077897 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.077918 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:42 crc kubenswrapper[4583]: E1209 14:59:42.077951 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.078039 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:42 crc kubenswrapper[4583]: E1209 14:59:42.078112 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:42 crc kubenswrapper[4583]: E1209 14:59:42.078163 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:42 crc kubenswrapper[4583]: E1209 14:59:42.078168 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.085971 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.098436 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f033
9d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.106731 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.113919 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.122105 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.128672 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.141612 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.150099 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.157272 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.173070 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.173195 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.173285 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.173355 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.173434 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.175126 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:34Z\\\",\\\"message\\\":\\\"\\\\nI1209 14:59:34.703590 6727 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1209 14:59:34.703593 6727 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-controller-manager/kube-controller-manager]} name:Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ba175bbe-5cc4-47e6-a32d-57693e1320bd}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1209 14:59:34.703627 6727 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Inte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.182024 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.189716 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.197536 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.204450 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.213060 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.219503 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.226924 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.234241 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.274681 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.274737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.274768 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.274781 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.274790 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.376145 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.376171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.376182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.376192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.376216 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.477867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.477887 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.477896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.477906 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.477914 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.579065 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.579088 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.579096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.579106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.579119 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.681420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.681572 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.681638 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.681703 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.681773 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.783969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.783999 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.784009 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.784038 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.784045 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.886198 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.886367 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.886432 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.886493 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.886543 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.987784 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.987820 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.987829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.987841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:42 crc kubenswrapper[4583]: I1209 14:59:42.987851 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:42Z","lastTransitionTime":"2025-12-09T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.089251 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.089278 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.089287 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.089296 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.089303 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.190541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.190564 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.190571 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.190580 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.190589 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.292130 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.292176 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.292187 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.292200 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.292208 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.393620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.393775 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.393847 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.393913 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.393972 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.496075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.496107 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.496116 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.496126 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.496133 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.598049 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.598077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.598085 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.598096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.598106 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.699589 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.699729 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.699843 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.699910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.699971 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.804258 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.804287 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.804296 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.804305 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.804313 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.896038 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.896115 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:47.896099768 +0000 UTC m=+146.103548857 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.896182 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.896255 4583 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.896288 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 15:00:47.896281625 +0000 UTC m=+146.103730713 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.906178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.906223 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.906232 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.906243 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.906253 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:43Z","lastTransitionTime":"2025-12-09T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.996508 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.996536 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:43 crc kubenswrapper[4583]: I1209 14:59:43.996575 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996650 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996662 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996670 4583 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996673 4583 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996700 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-09 15:00:47.996693332 +0000 UTC m=+146.204142420 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996766 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-09 15:00:47.996724632 +0000 UTC m=+146.204173730 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996683 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996798 4583 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996809 4583 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:43 crc kubenswrapper[4583]: E1209 14:59:43.996851 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-09 15:00:47.996840462 +0000 UTC m=+146.204289560 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.007400 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.007426 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.007435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.007445 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.007452 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.077973 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.077984 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.078010 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.078016 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.078084 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.078313 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.078427 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.078548 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.084521 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.108815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.108841 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.108849 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.108858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.108866 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.190093 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.190120 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.190128 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.190138 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.190144 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.198397 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.200442 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.200467 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.200476 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.200484 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.200491 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.208208 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.210095 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.210132 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.210141 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.210154 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.210175 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.217560 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.219572 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.219618 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.219626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.219637 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.219645 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.227072 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.229018 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.229045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.229054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.229063 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.229069 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.236512 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:44Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:44 crc kubenswrapper[4583]: E1209 14:59:44.236624 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.237502 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.237528 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.237536 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.237545 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.237552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.339228 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.339259 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.339269 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.339279 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.339288 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.441125 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.441153 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.441171 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.441180 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.441188 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.543494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.543522 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.543529 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.543540 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.543546 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.645399 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.645432 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.645442 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.645452 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.645461 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.746777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.746805 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.746812 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.746821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.746828 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.848445 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.848474 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.848482 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.848491 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.848498 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.950627 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.950665 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.950674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.950686 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:44 crc kubenswrapper[4583]: I1209 14:59:44.950695 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:44Z","lastTransitionTime":"2025-12-09T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.051993 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.052047 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.052061 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.052074 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.052087 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.153182 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.153211 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.153223 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.153233 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.153242 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.255022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.255051 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.255058 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.255067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.255073 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.356391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.356414 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.356422 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.356430 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.356438 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.457740 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.457947 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.458012 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.458072 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.458136 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.560544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.560698 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.560792 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.560863 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.560928 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.662435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.662599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.662674 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.662737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.662835 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.764354 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.764419 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.764431 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.764446 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.764456 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.866480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.866510 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.866520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.866530 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.866545 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.968131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.968270 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.968364 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.968435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:45 crc kubenswrapper[4583]: I1209 14:59:45.968496 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:45Z","lastTransitionTime":"2025-12-09T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.070377 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.070438 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.070448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.070461 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.070469 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.077893 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.078009 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.078120 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:46 crc kubenswrapper[4583]: E1209 14:59:46.078118 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.078168 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:46 crc kubenswrapper[4583]: E1209 14:59:46.078275 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:46 crc kubenswrapper[4583]: E1209 14:59:46.078340 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:46 crc kubenswrapper[4583]: E1209 14:59:46.078405 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.171731 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.171785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.171795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.171807 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.171816 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.273159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.273199 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.273208 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.273219 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.273228 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.375152 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.375207 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.375216 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.375229 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.375237 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.477289 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.477332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.477341 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.477355 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.477368 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.579410 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.579649 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.579718 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.579810 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.579879 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.681508 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.681597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.681677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.681758 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.681825 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.783324 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.783361 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.783371 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.783383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.783392 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.884850 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.884875 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.884883 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.884895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.884904 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.986992 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.987022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.987032 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.987041 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:46 crc kubenswrapper[4583]: I1209 14:59:46.987047 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:46Z","lastTransitionTime":"2025-12-09T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.088722 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.088786 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.088797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.088817 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.088829 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.191073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.191105 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.191113 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.191124 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.191132 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.292803 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.292834 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.292842 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.292853 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.292861 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.394118 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.394155 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.394165 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.394178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.394186 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.496033 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.496056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.496064 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.496073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.496080 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.597343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.597370 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.597377 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.597387 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.597393 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.698681 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.698737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.698771 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.698786 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.698797 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.800553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.800613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.800625 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.800638 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.800647 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.902165 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.902192 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.902211 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.902225 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:47 crc kubenswrapper[4583]: I1209 14:59:47.902232 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:47Z","lastTransitionTime":"2025-12-09T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.003522 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.003570 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.003579 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.003592 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.003600 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.078470 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:48 crc kubenswrapper[4583]: E1209 14:59:48.078554 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.078567 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.078675 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:48 crc kubenswrapper[4583]: E1209 14:59:48.078722 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.078938 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:48 crc kubenswrapper[4583]: E1209 14:59:48.079061 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:48 crc kubenswrapper[4583]: E1209 14:59:48.079432 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.079523 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 14:59:48 crc kubenswrapper[4583]: E1209 14:59:48.079708 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.105709 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.105737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.105762 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.105774 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.105782 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.207643 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.207682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.207692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.207706 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.207715 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.309029 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.309075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.309085 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.309099 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.309111 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.411251 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.411279 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.411287 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.411297 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.411305 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.513285 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.513323 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.513331 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.513344 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.513353 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.615034 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.615062 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.615070 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.615079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.615087 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.716867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.716893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.716900 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.716910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.716917 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.818814 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.818839 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.818847 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.818856 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.818864 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.920866 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.921162 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.921175 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.921189 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:48 crc kubenswrapper[4583]: I1209 14:59:48.921197 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:48Z","lastTransitionTime":"2025-12-09T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.023945 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.023979 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.023988 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.024000 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.024008 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.125381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.125428 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.125439 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.125453 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.125464 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.227494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.227526 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.227535 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.227546 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.227555 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.329807 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.329858 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.329867 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.329880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.329892 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.431887 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.431920 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.431928 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.431939 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.431948 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.533057 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.533089 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.533099 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.533111 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.533120 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.635291 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.635319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.635327 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.635336 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.635343 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.737337 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.737364 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.737373 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.737381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.737388 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.838551 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.838578 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.838586 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.838595 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.838602 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.940255 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.940281 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.940290 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.940298 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:49 crc kubenswrapper[4583]: I1209 14:59:49.940305 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:49Z","lastTransitionTime":"2025-12-09T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.041599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.041638 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.041647 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.041660 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.041669 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.078070 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.078120 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.078121 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.078190 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:50 crc kubenswrapper[4583]: E1209 14:59:50.078187 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:50 crc kubenswrapper[4583]: E1209 14:59:50.078266 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:50 crc kubenswrapper[4583]: E1209 14:59:50.078340 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:50 crc kubenswrapper[4583]: E1209 14:59:50.078383 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.143385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.143416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.143424 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.143434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.143442 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.245260 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.245317 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.245326 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.245339 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.245347 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.347458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.347494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.347503 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.347516 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.347526 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.448861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.448896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.448905 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.448918 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.448926 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.550660 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.550693 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.550701 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.550937 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.550948 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.652631 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.652673 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.652683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.652698 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.652711 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.754321 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.754364 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.754376 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.754391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.754401 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.856416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.856451 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.856459 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.856489 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.856498 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.958441 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.958470 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.958478 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.958489 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:50 crc kubenswrapper[4583]: I1209 14:59:50.958498 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:50Z","lastTransitionTime":"2025-12-09T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.060613 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.060649 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.060657 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.060670 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.060678 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.162006 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.162052 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.162061 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.162073 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.162082 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.264129 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.264159 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.264168 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.264178 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.264186 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.366205 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.366256 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.366266 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.366277 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.366286 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.468587 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.468619 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.468628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.468639 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.468646 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.570626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.570683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.570692 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.570705 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.570714 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.672921 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.672949 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.672958 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.672969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.672977 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.774987 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.775018 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.775027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.775038 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.775048 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.876714 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.876772 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.876781 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.876798 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.876808 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.978951 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.978983 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.978991 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.979005 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:51 crc kubenswrapper[4583]: I1209 14:59:51.979013 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:51Z","lastTransitionTime":"2025-12-09T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.078080 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:52 crc kubenswrapper[4583]: E1209 14:59:52.078167 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.078301 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.078325 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.078428 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:52 crc kubenswrapper[4583]: E1209 14:59:52.078488 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:52 crc kubenswrapper[4583]: E1209 14:59:52.078411 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:52 crc kubenswrapper[4583]: E1209 14:59:52.078572 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.080821 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.080851 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.080861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.080872 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.080882 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.086821 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5848176-adf1-47d0-b043-1b305357d808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rldnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9j9s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.094717 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc10229f8e205be1d97dcb29cd626ca5cd4d1d09998021486160a8963298d437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44c894eb9a334a36daa9875e902d06c86c63328985d2742b0dee3080ee62a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.102010 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.109483 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.116409 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce1d933-c482-444a-86c2-3e9f1315eb16\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c347f89a7ea4f7cee34692b6fbc79da7de53c6592179c78169a3b2270f39ffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2sj2c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jmtvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.124718 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41313e81-1231-4cd0-a9b6-8a9d336704de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://843858c9c33e437e99e058eae54cad296cd5bb93256dcce18e0b2084b176eb2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a101f5da91342cfed19ff3f32e21d38b240376eb77e509c2bf65c127f760cc64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d7e69a99b85f226fdbe70dd184ef5aaaf1d713382395dfe5829c548c9c98e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a757711ba4e6ce3c00c0fc96291035ef83688207a61b7775ada06eaa2aa7220d\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-09T14:58:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e198b160f36b29a495d9b32856d362e13a95372cee7e94db521628570a44e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09bd739bfd059cd4a97db0bc374c4a1f4b5cd23e74eb6b8e8080b5d8f136528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3580e9d4eb4b72b719912a46a74a0f55e234a4eeeb674e0d07f888546a7cd101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2vxtt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.131992 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p54bl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"011d6421-0c34-4f35-9ead-cb4189a28d2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8334af9969be1646d645904a6fb5114711242653fa21f906117eae0bf29ba9f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7k4pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:47Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p54bl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.139067 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.144938 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ck9w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e80dcf94-2f3f-4645-9860-2865a33ee9ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://963216837ec365e6e113389531865f0f5603de3df7e4150f84d0228b9d62e8ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8knc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ck9w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.152767 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2sh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c240c9e3-573f-4237-b433-deb0f21e0123\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:28Z\\\",\\\"message\\\":\\\"2025-12-09T14:58:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8\\\\n2025-12-09T14:58:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ae4543bb-9379-4471-9fef-9e6c3689a8b8 to /host/opt/cni/bin/\\\\n2025-12-09T14:58:43Z [verbose] multus-daemon started\\\\n2025-12-09T14:58:43Z [verbose] Readiness Indicator file check\\\\n2025-12-09T14:59:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:59:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pfhk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2sh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.164311 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29ac013b-8543-4f00-bc14-22441f765f91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"message\\\":\\\"le observer\\\\nW1209 14:58:39.084830 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1209 14:58:39.084918 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1209 14:58:39.087654 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1718459536/tls.crt::/tmp/serving-cert-1718459536/tls.key\\\\\\\"\\\\nI1209 14:58:39.286673 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1209 14:58:39.289224 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1209 14:58:39.289241 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1209 14:58:39.289257 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1209 14:58:39.289261 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1209 14:58:39.293901 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1209 14:58:39.293926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1209 14:58:39.293934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1209 14:58:39.293941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1209 14:58:39.293944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1209 14:58:39.293947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1209 14:58:39.294167 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1209 14:58:39.294929 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.172057 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ffdd3f6-e0a6-43f1-a641-7a70415b44b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3ae0ef7a8db1c32eced7e40b47d021a029da9d2f643ec2166dbc7c1172ef02f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0ebca1e103473b82e59ceed0dde6e9185f9d7819bef0bee99240fc3daf53dd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://218a77998d0eac3384f7594ee0a00e84f502f88d5b685f1037607f491c1623ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0b16ddd966b5fe8572f9c204464366003e893adb0374b0afdeb135d0fa083c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.182696 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.182723 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.182732 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.182757 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.182766 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.184619 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"714fb898-b201-4043-9df1-3a0a8fdc3a1a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6d1c7df905e3d1dad693b9c89cd8f84e7c4867d9e7a53f6608066c9c815157b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eca31392584e1ef2553dc788672a936b2267b4e822a14c36a9eb30cb17d809e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32a0662df1426a27381377dc83cc7c61c4adfb362c14dfb82b52bbc138229cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e8d3f933d737b081593f08a60ad897cc71f0339d012e1ee3ef78df53853a152\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64c8e6e003e7a34490690732d4a9feebb35474db366846819fc3c430f38c641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b31cac537cf436707c58fad789fe8b0469c2349e9ff3b594c3507ff9ebfb81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c123714377a4590176b3017022c51d9669ee8a6475336f337c3b97289f10f15\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7859cc8e4907c62c18e5195263b453fffbe878ef27ee3ad38aaf57dd113f6a9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.192995 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8be2e21473d67af17b35481f6a6f3d24db0b374d9adc4a238df1391fc148add\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.200955 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b03c11e25f5a8adbc56c161ba7388226b16bacbe02cb075e967d075653010a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.212596 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2acdc1e0-3af8-41bf-958f-3cd291da42db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b
5095b68cb665207134108e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-09T14:59:34Z\\\",\\\"message\\\":\\\"\\\\nI1209 14:59:34.703590 6727 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1209 14:59:34.703593 6727 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-controller-manager/kube-controller-manager]} name:Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ba175bbe-5cc4-47e6-a32d-57693e1320bd}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1209 14:59:34.703627 6727 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Inte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-09T14:59:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54w5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5rdc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.221720 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a6bc1c9-15f6-4c24-844f-06978e663477\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2933caf921471e882664cc73704da46aa4466d4a9402b52cb0e9722d11f763cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3798295b3cb12976e053672d42c0a54c18cdc3696f502960ac1979d0f259eaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4kdtr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lk778\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.228954 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be8f1766-3172-4048-b0c5-1c53fed20f9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7752c75cfec7b45c09e22566019aa8d18966a4c02a300f29337fcbdcdabe93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4423f84e404ea5ce7e4bb7c290f62b8f9da9d506e4e5673a58f2a9e884c6cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe59cd573e415de985057c21a25e701a0cd706f9d25d8ebbba0eab7ece846a95\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.235299 4583 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b2aa2c8-b4dd-4379-b188-2694ae998006\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6e09a35dde29506f55d43fd46acf63f1baf966cee84c909f52af402d823c535\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-09T14:58:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8c89cb93ddbfb6d04163384314515097a195eaf6e9be70c154528822e6d3006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8c89cb93ddbfb6d04163384314515097a195eaf6e9be70c154528822e6d3006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-09T14:58:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-09T14:58:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-09T14:58:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.285079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.285106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.285114 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.285128 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.285136 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.386105 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.386475 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.386542 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.386606 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.386677 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.488434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.488466 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.488476 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.488488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.488497 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.589949 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.589978 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.589986 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.589997 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.590006 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.691934 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.691962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.691970 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.691982 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.691990 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.793369 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.793395 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.793411 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.793420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.793429 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.894827 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.894852 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.894860 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.894870 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.894877 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.997220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.997335 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.997404 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.997467 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:52 crc kubenswrapper[4583]: I1209 14:59:52.997533 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:52Z","lastTransitionTime":"2025-12-09T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.099543 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.099567 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.099575 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.099584 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.099591 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.200901 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.200926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.200933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.200942 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.200950 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.302667 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.302700 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.302710 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.302722 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.302731 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.403895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.403953 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.403964 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.403979 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.403991 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.505544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.505590 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.505600 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.505611 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.505619 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.607602 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.607633 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.607641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.607650 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.607658 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.709045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.709079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.709087 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.709098 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.709106 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.810388 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.810418 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.810426 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.810436 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.810444 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.912280 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.912310 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.912319 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.912329 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:53 crc kubenswrapper[4583]: I1209 14:59:53.912336 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:53Z","lastTransitionTime":"2025-12-09T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.013910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.013958 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.013968 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.013984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.013995 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.077882 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.077915 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.077970 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.077972 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.078040 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.078047 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.078096 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.078146 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.115902 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.115928 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.115937 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.115948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.115956 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.217708 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.217773 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.217794 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.217804 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.217811 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.319597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.319624 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.319632 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.319641 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.319673 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.422026 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.422071 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.422081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.422095 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.422110 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.524374 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.524405 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.524414 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.524426 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.524433 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.626301 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.626343 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.626354 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.626368 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.626379 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.627508 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.627544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.627552 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.627566 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.627575 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.636355 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.638856 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.638889 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.638899 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.638910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.638918 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.647188 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.649518 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.649540 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.649548 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.649559 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.649566 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.657398 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.659495 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.659525 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.659534 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.659545 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.659553 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.667155 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.669259 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.669294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.669303 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.669313 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.669320 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.676529 4583 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-09T14:59:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"30a4b563-0e9e-4808-9382-0ed5dfd7a2e7\\\",\\\"systemUUID\\\":\\\"a7c69146-0151-4a52-a15a-5504218d56ae\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-09T14:59:54Z is after 2025-08-24T17:21:41Z" Dec 09 14:59:54 crc kubenswrapper[4583]: E1209 14:59:54.676632 4583 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.727556 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.727580 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.727588 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.727597 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.727604 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.829219 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.829249 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.829257 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.829268 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.829276 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.931170 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.931205 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.931213 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.931226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:54 crc kubenswrapper[4583]: I1209 14:59:54.931234 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:54Z","lastTransitionTime":"2025-12-09T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.033215 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.033245 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.033253 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.033264 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.033273 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.134760 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.134795 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.134803 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.134815 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.134826 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.236510 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.236554 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.236564 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.236576 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.236625 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.340370 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.340405 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.340413 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.340425 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.340434 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.442682 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.442723 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.442733 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.442776 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.442788 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.544626 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.544663 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.544671 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.544683 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.544691 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.646519 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.646543 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.646552 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.646566 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.646575 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.748515 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.748545 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.748553 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.748565 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.748573 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.850513 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.850544 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.850554 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.850567 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.850580 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.952187 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.952216 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.952226 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.952239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:55 crc kubenswrapper[4583]: I1209 14:59:55.952246 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:55Z","lastTransitionTime":"2025-12-09T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.053602 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.053634 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.053643 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.053657 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.053668 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.077983 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:56 crc kubenswrapper[4583]: E1209 14:59:56.078069 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.078124 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:56 crc kubenswrapper[4583]: E1209 14:59:56.078162 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.078375 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.078421 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:56 crc kubenswrapper[4583]: E1209 14:59:56.078601 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:56 crc kubenswrapper[4583]: E1209 14:59:56.078669 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.155338 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.155372 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.155381 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.155392 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.155400 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.257269 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.257331 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.257344 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.257360 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.257371 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.359007 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.359051 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.359062 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.359075 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.359084 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.460620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.460656 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.460664 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.460677 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.460686 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.562596 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.562708 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.562802 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.562875 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.562995 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.664054 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.664088 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.664097 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.664118 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.664128 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.765705 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.765765 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.765776 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.765789 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.765800 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.867109 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.867146 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.867155 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.867168 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.867177 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.968523 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.968697 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.968861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.968926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:56 crc kubenswrapper[4583]: I1209 14:59:56.968984 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:56Z","lastTransitionTime":"2025-12-09T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.070828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.070933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.071011 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.071081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.071142 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.173092 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.173123 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.173132 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.173143 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.173151 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.274586 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.274828 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.274952 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.275091 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.275190 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.376961 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.377012 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.377022 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.377036 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.377046 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.478805 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.478838 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.478847 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.478859 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.478867 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.581024 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.581049 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.581056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.581066 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.581074 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.682903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.682942 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.682951 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.682964 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.682974 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.784693 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.784728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.784737 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.784768 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.784776 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.886389 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.886434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.886444 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.886458 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.886467 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.987917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.987943 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.987952 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.987963 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:57 crc kubenswrapper[4583]: I1209 14:59:57.987970 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:57Z","lastTransitionTime":"2025-12-09T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.078095 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.078100 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 14:59:58 crc kubenswrapper[4583]: E1209 14:59:58.078519 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.078155 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 14:59:58 crc kubenswrapper[4583]: E1209 14:59:58.078589 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.078151 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 14:59:58 crc kubenswrapper[4583]: E1209 14:59:58.078417 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 14:59:58 crc kubenswrapper[4583]: E1209 14:59:58.078643 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.089383 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.089416 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.089425 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.089438 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.089447 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.190957 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.191067 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.191131 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.191201 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.191264 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.292914 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.292941 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.292951 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.292963 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.292972 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.394732 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.394768 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.394777 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.394787 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.394795 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.495896 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.495924 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.495932 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.495941 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.495949 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.597042 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.597139 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.597217 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.597291 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.597357 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.699183 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.699274 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.699368 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.699434 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.699486 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.800914 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.800947 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.800957 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.800969 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.800977 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.903297 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.903322 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.903342 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.903360 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:58 crc kubenswrapper[4583]: I1209 14:59:58.903368 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:58Z","lastTransitionTime":"2025-12-09T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.005376 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.005420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.005429 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.005439 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.005446 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.107055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.107204 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.107275 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.107360 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.107437 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.208711 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.208767 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.208776 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.208785 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.208792 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.310220 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.310239 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.310247 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.310257 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.310265 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.411831 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.411857 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.411865 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.411874 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.411880 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.513113 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.513135 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.513143 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.513152 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.513159 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.614451 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.614472 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.614480 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.614490 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.614498 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.716546 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.716646 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.716721 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.716829 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.716903 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.818599 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.818984 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.819035 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.819055 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.819068 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.920376 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.920408 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.920417 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.920425 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 14:59:59 crc kubenswrapper[4583]: I1209 14:59:59.920432 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T14:59:59Z","lastTransitionTime":"2025-12-09T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.021880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.021909 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.021917 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.021926 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.021932 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.077808 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.077825 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.077886 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.077913 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.077804 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.078042 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.078125 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.078390 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.123893 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.123997 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.124056 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.124126 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.124187 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.125247 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.125786 4583 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 15:00:00 crc kubenswrapper[4583]: E1209 15:00:00.125850 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs podName:b5848176-adf1-47d0-b043-1b305357d808 nodeName:}" failed. No retries permitted until 2025-12-09 15:01:04.125834677 +0000 UTC m=+162.333283765 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs") pod "network-metrics-daemon-9j9s6" (UID: "b5848176-adf1-47d0-b043-1b305357d808") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.225945 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.225996 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.226007 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.226020 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.226029 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.327579 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.327602 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.327609 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.327620 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.327628 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.428916 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.428951 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.428962 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.428982 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.428991 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.531109 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.531572 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.531649 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.531714 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.531811 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.633029 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.633061 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.633069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.633081 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.633090 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.734570 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.734784 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.734865 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.734942 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.735006 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.836243 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.836448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.836512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.836581 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.836667 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.938333 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.938377 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.938386 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.938400 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:00 crc kubenswrapper[4583]: I1209 15:00:00.938408 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:00Z","lastTransitionTime":"2025-12-09T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.040216 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.040243 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.040252 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.040261 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.040268 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.079094 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:00:01 crc kubenswrapper[4583]: E1209 15:00:01.079212 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5rdc6_openshift-ovn-kubernetes(2acdc1e0-3af8-41bf-958f-3cd291da42db)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.141702 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.141770 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.141782 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.141797 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.141809 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.243283 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.243322 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.243332 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.243344 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.243353 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.345199 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.345234 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.345244 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.345254 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.345263 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.446886 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.446915 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.446924 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.446933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.446970 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.548816 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.548856 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.548868 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.548880 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.548894 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.650508 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.650541 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.650551 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.650563 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.650572 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.752621 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.752643 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.752650 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.752659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.752666 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.854102 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.854136 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.854145 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.854156 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.854165 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.955659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.955686 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.955694 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.955703 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:01 crc kubenswrapper[4583]: I1209 15:00:01.955710 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:01Z","lastTransitionTime":"2025-12-09T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.056974 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.057130 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.057207 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.057298 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.057377 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.078275 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:02 crc kubenswrapper[4583]: E1209 15:00:02.078345 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.078384 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.078437 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:02 crc kubenswrapper[4583]: E1209 15:00:02.078483 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.078529 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:02 crc kubenswrapper[4583]: E1209 15:00:02.078710 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:02 crc kubenswrapper[4583]: E1209 15:00:02.078800 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.092650 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-p54bl" podStartSLOduration=80.092639988 podStartE2EDuration="1m20.092639988s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.085591912 +0000 UTC m=+100.293041000" watchObservedRunningTime="2025-12-09 15:00:02.092639988 +0000 UTC m=+100.300089066" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.108270 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.108258369 podStartE2EDuration="47.108258369s" podCreationTimestamp="2025-12-09 14:59:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.092693069 +0000 UTC m=+100.300142157" watchObservedRunningTime="2025-12-09 15:00:02.108258369 +0000 UTC m=+100.315707456" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.108549 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=80.108544863 podStartE2EDuration="1m20.108544863s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.107958247 +0000 UTC m=+100.315407335" watchObservedRunningTime="2025-12-09 15:00:02.108544863 +0000 UTC m=+100.315993952" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.138736 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-ck9w4" podStartSLOduration=80.138723319 podStartE2EDuration="1m20.138723319s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.138595637 +0000 UTC m=+100.346044724" watchObservedRunningTime="2025-12-09 15:00:02.138723319 +0000 UTC m=+100.346172407" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.147450 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-j2sh5" podStartSLOduration=80.147439672 podStartE2EDuration="1m20.147439672s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.146870318 +0000 UTC m=+100.354319406" watchObservedRunningTime="2025-12-09 15:00:02.147439672 +0000 UTC m=+100.354888759" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.159200 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.159232 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.159241 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.159253 4583 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.159281 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.166891 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=82.166877621 podStartE2EDuration="1m22.166877621s" podCreationTimestamp="2025-12-09 14:58:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.15712127 +0000 UTC m=+100.364570348" watchObservedRunningTime="2025-12-09 15:00:02.166877621 +0000 UTC m=+100.374326709" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.183482 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lk778" podStartSLOduration=80.183469856 podStartE2EDuration="1m20.183469856s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.168846851 +0000 UTC m=+100.376295939" watchObservedRunningTime="2025-12-09 15:00:02.183469856 +0000 UTC m=+100.390918943" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.189548 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=18.189540391 podStartE2EDuration="18.189540391s" podCreationTimestamp="2025-12-09 14:59:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.189188261 +0000 UTC m=+100.396637349" watchObservedRunningTime="2025-12-09 15:00:02.189540391 +0000 UTC m=+100.396989479" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.206287 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=83.206273063 podStartE2EDuration="1m23.206273063s" podCreationTimestamp="2025-12-09 14:58:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.205556158 +0000 UTC m=+100.413005246" watchObservedRunningTime="2025-12-09 15:00:02.206273063 +0000 UTC m=+100.413722141" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.245644 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podStartSLOduration=80.24562998 podStartE2EDuration="1m20.24562998s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.245558284 +0000 UTC m=+100.453007371" watchObservedRunningTime="2025-12-09 15:00:02.24562998 +0000 UTC m=+100.453079068" Dec 
09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.256493 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2vxtt" podStartSLOduration=80.256477569 podStartE2EDuration="1m20.256477569s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:02.255966996 +0000 UTC m=+100.463416085" watchObservedRunningTime="2025-12-09 15:00:02.256477569 +0000 UTC m=+100.463926657" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.261303 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.261352 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.261363 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.261384 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.261393 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.362897 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.362925 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.362933 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.362949 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.362958 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.464452 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.464484 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.464494 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.464505 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.464512 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.566481 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.566520 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.566529 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.566542 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.566552 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.667948 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.667994 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.668003 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.668014 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.668023 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.769628 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.769659 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.769667 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.769679 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.769687 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.871426 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.871466 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.871475 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.871488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.871497 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.973910 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.973944 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.973953 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.973964 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:02 crc kubenswrapper[4583]: I1209 15:00:02.973973 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:02Z","lastTransitionTime":"2025-12-09T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.075680 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.075716 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.075728 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.075759 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.075768 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.177391 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.177427 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.177435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.177448 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.177456 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.279101 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.279144 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.279153 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.279166 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.279174 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.380708 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.380736 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.380769 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.380780 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.380789 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.482699 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.482730 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.482738 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.482764 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.482774 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.584861 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.584895 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.584903 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.584914 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.584922 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.686538 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.686610 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.686618 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.686652 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.686661 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.788982 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.789027 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.789035 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.789045 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.789054 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.890791 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.890826 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.890834 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.890848 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.890858 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.992209 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.992240 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.992248 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.992261 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:03 crc kubenswrapper[4583]: I1209 15:00:03.992269 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:03Z","lastTransitionTime":"2025-12-09T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.078666 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:04 crc kubenswrapper[4583]: E1209 15:00:04.078788 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.078829 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:04 crc kubenswrapper[4583]: E1209 15:00:04.079045 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.079411 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:04 crc kubenswrapper[4583]: E1209 15:00:04.079516 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.079586 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:04 crc kubenswrapper[4583]: E1209 15:00:04.079656 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.094077 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.094106 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.094117 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.094132 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.094141 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.196349 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.196385 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.196406 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.196420 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.196429 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.298010 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.298059 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.298069 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.298079 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.298087 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.400036 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.400086 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.400096 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.400108 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.400117 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.502235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.502261 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.502294 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.502304 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.502312 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.604371 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.604403 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.604411 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.604423 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.604431 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.706204 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.706227 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.706235 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.706245 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.706253 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.808435 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.808466 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.808476 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.808488 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.808517 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.910442 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.910469 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.910478 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.910512 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:04 crc kubenswrapper[4583]: I1209 15:00:04.910522 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:04Z","lastTransitionTime":"2025-12-09T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.012588 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.012623 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.012632 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.012645 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.012654 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:05Z","lastTransitionTime":"2025-12-09T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.019697 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.019726 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.019736 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.019762 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.019770 4583 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-09T15:00:05Z","lastTransitionTime":"2025-12-09T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.045860 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c"] Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.046123 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.047543 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.047802 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.048279 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.048326 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.166675 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.166727 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.166775 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.166805 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.166848 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267494 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267548 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267617 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267633 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267652 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267712 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.267740 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.268358 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.272067 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.281011 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d4926a9f-ea97-488e-8ef9-c6cc5fbd150b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-k446c\" (UID: \"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.356536 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.421760 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" event={"ID":"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b","Type":"ContainerStarted","Data":"84a8d2919352160bc8fb6e90bfd026cbe7fafd7313178b824fb2cb0b7fb3efaa"} Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.421794 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" event={"ID":"d4926a9f-ea97-488e-8ef9-c6cc5fbd150b","Type":"ContainerStarted","Data":"baf29aadba3e5457dff3b14cf59a212ffadb2e441dc3941af5eda01b1c7ef98c"} Dec 09 15:00:05 crc kubenswrapper[4583]: I1209 15:00:05.430689 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-k446c" podStartSLOduration=83.430678267 podStartE2EDuration="1m23.430678267s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:05.429961252 +0000 UTC m=+103.637410340" watchObservedRunningTime="2025-12-09 15:00:05.430678267 +0000 UTC m=+103.638127354" Dec 09 15:00:06 crc kubenswrapper[4583]: I1209 15:00:06.078300 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:06 crc kubenswrapper[4583]: I1209 15:00:06.078338 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:06 crc kubenswrapper[4583]: I1209 15:00:06.078299 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:06 crc kubenswrapper[4583]: I1209 15:00:06.078942 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:06 crc kubenswrapper[4583]: E1209 15:00:06.079066 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:06 crc kubenswrapper[4583]: E1209 15:00:06.079136 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:06 crc kubenswrapper[4583]: E1209 15:00:06.079200 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:06 crc kubenswrapper[4583]: E1209 15:00:06.079270 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:08 crc kubenswrapper[4583]: I1209 15:00:08.078008 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:08 crc kubenswrapper[4583]: I1209 15:00:08.078048 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:08 crc kubenswrapper[4583]: I1209 15:00:08.078113 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:08 crc kubenswrapper[4583]: I1209 15:00:08.078211 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:08 crc kubenswrapper[4583]: E1209 15:00:08.078208 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:08 crc kubenswrapper[4583]: E1209 15:00:08.078263 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:08 crc kubenswrapper[4583]: E1209 15:00:08.078305 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:08 crc kubenswrapper[4583]: E1209 15:00:08.078354 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:10 crc kubenswrapper[4583]: I1209 15:00:10.078695 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:10 crc kubenswrapper[4583]: E1209 15:00:10.079210 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:10 crc kubenswrapper[4583]: I1209 15:00:10.078730 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:10 crc kubenswrapper[4583]: E1209 15:00:10.079403 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:10 crc kubenswrapper[4583]: I1209 15:00:10.078695 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:10 crc kubenswrapper[4583]: E1209 15:00:10.079582 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:10 crc kubenswrapper[4583]: I1209 15:00:10.078788 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:10 crc kubenswrapper[4583]: E1209 15:00:10.079775 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:12 crc kubenswrapper[4583]: I1209 15:00:12.078568 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:12 crc kubenswrapper[4583]: E1209 15:00:12.079401 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:12 crc kubenswrapper[4583]: I1209 15:00:12.079459 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:12 crc kubenswrapper[4583]: E1209 15:00:12.079562 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:12 crc kubenswrapper[4583]: I1209 15:00:12.079478 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:12 crc kubenswrapper[4583]: I1209 15:00:12.079460 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:12 crc kubenswrapper[4583]: E1209 15:00:12.079787 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:12 crc kubenswrapper[4583]: E1209 15:00:12.079867 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:14 crc kubenswrapper[4583]: I1209 15:00:14.078280 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:14 crc kubenswrapper[4583]: I1209 15:00:14.078319 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:14 crc kubenswrapper[4583]: E1209 15:00:14.078386 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:14 crc kubenswrapper[4583]: I1209 15:00:14.078436 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:14 crc kubenswrapper[4583]: I1209 15:00:14.078474 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:14 crc kubenswrapper[4583]: E1209 15:00:14.078590 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:14 crc kubenswrapper[4583]: E1209 15:00:14.078647 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:14 crc kubenswrapper[4583]: E1209 15:00:14.078797 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.443478 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/1.log" Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.444458 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/0.log" Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.444528 4583 generic.go:334] "Generic (PLEG): container finished" podID="c240c9e3-573f-4237-b433-deb0f21e0123" containerID="ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a" exitCode=1 Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.444559 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerDied","Data":"ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a"} Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.444606 4583 scope.go:117] "RemoveContainer" containerID="1e29bba71cc589a064fb6fee04b28f4f62f751a5ec758712bdbc2ab8d40b5076" Dec 09 15:00:15 crc kubenswrapper[4583]: I1209 15:00:15.445068 4583 scope.go:117] "RemoveContainer" containerID="ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a" Dec 09 15:00:15 crc kubenswrapper[4583]: E1209 15:00:15.445221 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-j2sh5_openshift-multus(c240c9e3-573f-4237-b433-deb0f21e0123)\"" pod="openshift-multus/multus-j2sh5" podUID="c240c9e3-573f-4237-b433-deb0f21e0123" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.078316 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.078390 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.078412 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.078412 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:16 crc kubenswrapper[4583]: E1209 15:00:16.078822 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:16 crc kubenswrapper[4583]: E1209 15:00:16.078877 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:16 crc kubenswrapper[4583]: E1209 15:00:16.079006 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.078956 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:00:16 crc kubenswrapper[4583]: E1209 15:00:16.079066 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.447609 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/1.log" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.449395 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/3.log" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.451181 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerStarted","Data":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.451527 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.470336 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podStartSLOduration=94.470320949 podStartE2EDuration="1m34.470320949s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:16.470252629 +0000 UTC m=+114.677701717" watchObservedRunningTime="2025-12-09 15:00:16.470320949 +0000 UTC m=+114.677770037" Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.690979 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9j9s6"] Dec 09 15:00:16 crc kubenswrapper[4583]: I1209 15:00:16.691102 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:16 crc kubenswrapper[4583]: E1209 15:00:16.691181 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:18 crc kubenswrapper[4583]: I1209 15:00:18.077876 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:18 crc kubenswrapper[4583]: I1209 15:00:18.077970 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:18 crc kubenswrapper[4583]: E1209 15:00:18.077967 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:18 crc kubenswrapper[4583]: I1209 15:00:18.077989 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:18 crc kubenswrapper[4583]: I1209 15:00:18.078011 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:18 crc kubenswrapper[4583]: E1209 15:00:18.078055 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:18 crc kubenswrapper[4583]: E1209 15:00:18.078105 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:18 crc kubenswrapper[4583]: E1209 15:00:18.078198 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:20 crc kubenswrapper[4583]: I1209 15:00:20.077802 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:20 crc kubenswrapper[4583]: I1209 15:00:20.078042 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:20 crc kubenswrapper[4583]: I1209 15:00:20.078059 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:20 crc kubenswrapper[4583]: E1209 15:00:20.078147 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:20 crc kubenswrapper[4583]: I1209 15:00:20.078167 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:20 crc kubenswrapper[4583]: E1209 15:00:20.078276 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:20 crc kubenswrapper[4583]: E1209 15:00:20.078475 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:20 crc kubenswrapper[4583]: E1209 15:00:20.078826 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:22 crc kubenswrapper[4583]: I1209 15:00:22.077770 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:22 crc kubenswrapper[4583]: I1209 15:00:22.077831 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:22 crc kubenswrapper[4583]: I1209 15:00:22.078542 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.078531 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:22 crc kubenswrapper[4583]: I1209 15:00:22.078585 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.078651 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.078691 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.078755 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.136954 4583 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 09 15:00:22 crc kubenswrapper[4583]: E1209 15:00:22.164276 4583 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:00:24 crc kubenswrapper[4583]: I1209 15:00:24.078236 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:24 crc kubenswrapper[4583]: I1209 15:00:24.078253 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:24 crc kubenswrapper[4583]: I1209 15:00:24.078268 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:24 crc kubenswrapper[4583]: I1209 15:00:24.078291 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:24 crc kubenswrapper[4583]: E1209 15:00:24.078335 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:24 crc kubenswrapper[4583]: E1209 15:00:24.078424 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:24 crc kubenswrapper[4583]: E1209 15:00:24.078495 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:24 crc kubenswrapper[4583]: E1209 15:00:24.078575 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:26 crc kubenswrapper[4583]: I1209 15:00:26.077806 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:26 crc kubenswrapper[4583]: I1209 15:00:26.077867 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:26 crc kubenswrapper[4583]: E1209 15:00:26.077925 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:26 crc kubenswrapper[4583]: I1209 15:00:26.077974 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:26 crc kubenswrapper[4583]: I1209 15:00:26.077806 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:26 crc kubenswrapper[4583]: E1209 15:00:26.078022 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:26 crc kubenswrapper[4583]: E1209 15:00:26.078098 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:26 crc kubenswrapper[4583]: E1209 15:00:26.078177 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:27 crc kubenswrapper[4583]: E1209 15:00:27.166454 4583 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:00:28 crc kubenswrapper[4583]: I1209 15:00:28.078269 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:28 crc kubenswrapper[4583]: I1209 15:00:28.078332 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:28 crc kubenswrapper[4583]: I1209 15:00:28.078354 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:28 crc kubenswrapper[4583]: E1209 15:00:28.078370 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:28 crc kubenswrapper[4583]: E1209 15:00:28.078406 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:28 crc kubenswrapper[4583]: E1209 15:00:28.078493 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:28 crc kubenswrapper[4583]: I1209 15:00:28.078543 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:28 crc kubenswrapper[4583]: E1209 15:00:28.078589 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.078343 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.078423 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:30 crc kubenswrapper[4583]: E1209 15:00:30.078527 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.078568 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.078587 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:30 crc kubenswrapper[4583]: E1209 15:00:30.078626 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:30 crc kubenswrapper[4583]: E1209 15:00:30.078665 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:30 crc kubenswrapper[4583]: E1209 15:00:30.078929 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.079126 4583 scope.go:117] "RemoveContainer" containerID="ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.484781 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/1.log" Dec 09 15:00:30 crc kubenswrapper[4583]: I1209 15:00:30.485172 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerStarted","Data":"dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9"} Dec 09 15:00:32 crc kubenswrapper[4583]: I1209 15:00:32.078557 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:32 crc kubenswrapper[4583]: I1209 15:00:32.078620 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:32 crc kubenswrapper[4583]: I1209 15:00:32.078620 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:32 crc kubenswrapper[4583]: E1209 15:00:32.079260 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9j9s6" podUID="b5848176-adf1-47d0-b043-1b305357d808" Dec 09 15:00:32 crc kubenswrapper[4583]: I1209 15:00:32.079275 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:32 crc kubenswrapper[4583]: E1209 15:00:32.079334 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 09 15:00:32 crc kubenswrapper[4583]: E1209 15:00:32.079348 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 09 15:00:32 crc kubenswrapper[4583]: E1209 15:00:32.079406 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.078673 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.078712 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.078695 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.078874 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.080098 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.080379 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.080691 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.080788 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.080822 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 09 15:00:34 crc kubenswrapper[4583]: I1209 15:00:34.081513 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.501466 4583 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.528395 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9k4b2"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.528672 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.530457 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.530695 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.545845 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.545852 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.545887 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.545886 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.546027 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.546075 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.546227 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.550533 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.552196 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.552363 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.552496 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.552694 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.552990 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.559579 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wjg5r"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.559880 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.563573 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.563591 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.563592 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.564488 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.564607 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.564629 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.564831 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.564671 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.565089 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.565299 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.565515 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.565631 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.566484 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zzls8"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.566895 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.572878 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.572896 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.572881 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.573295 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.573306 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.573485 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.573522 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.573765 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.574014 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.574479 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.574602 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mh54x"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.574996 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.575596 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.575951 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.576507 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.577597 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.587908 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.588477 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.588993 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589074 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589421 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589447 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589610 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589706 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589835 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589877 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.589973 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.590108 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.590127 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.590136 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.590153 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.594102 4583 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.594279 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.594385 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.594813 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.595064 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.596210 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.596442 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.597234 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.596459 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.600633 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.596995 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598124 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598171 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598386 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598426 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598458 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598485 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598586 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598627 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598662 4583 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"client-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598717 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.598782 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615103 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615348 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615393 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615363 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615575 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615900 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615992 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.615902 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616164 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-serving-cert\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616185 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-client\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616100 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616208 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-serving-cert\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616222 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616240 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-policies\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616257 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fxws\" (UniqueName: \"kubernetes.io/projected/a4ac6176-8382-4407-93d4-b74e1ec54a30-kube-api-access-5fxws\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616270 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-dir\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616289 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-trusted-ca\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616329 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l5vd\" (UniqueName: \"kubernetes.io/projected/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-kube-api-access-7l5vd\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616408 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-encryption-config\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616440 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-config\") pod \"console-operator-58897d9998-9k4b2\" (UID: 
\"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616597 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616709 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616885 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.616993 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.618350 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m7rq7"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.618981 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.619699 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.619895 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.619990 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.620626 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.620699 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.620792 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.620857 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.620920 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.624186 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-lhdvm"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.624556 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.624858 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ls45p"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.625258 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.626478 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-qw8lk"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.626847 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.629328 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.632061 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lwsz7"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.632482 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.639935 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.640506 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.640854 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.641839 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.642301 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.644196 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.644850 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645160 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645289 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645377 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645403 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645459 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645630 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.645865 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.646179 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.646538 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.646618 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.646690 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.655163 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.655474 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.656067 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.656284 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.656540 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.656605 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.658292 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.658412 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.662002 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.662164 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.664815 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.669131 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.669504 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.670371 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.670865 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.671078 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.671566 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.671781 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kdmx6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.672130 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.672309 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.672345 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.672889 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.673782 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.674243 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.674368 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.674881 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.675264 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.675586 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.676069 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.676368 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.676833 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxklc"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.677150 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.677532 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.677861 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.680385 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.680796 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.681624 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.682477 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.682536 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.683077 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.683457 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.684058 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.684793 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.685158 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.685735 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.686548 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.686677 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.687833 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.689099 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.690002 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.690103 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.690938 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wjg5r"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.691971 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m7rq7"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.693206 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ls45p"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.694044 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.695225 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-apiserver/apiserver-76f77b778f-zzls8"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.696229 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.697112 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.698095 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.699305 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mh54x"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.701156 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lwsz7"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.702412 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.703402 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9k4b2"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.704641 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.706208 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kdmx6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.707084 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-lp2s6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.707917 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.707991 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-bsppj"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.708690 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.709146 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.710112 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.712346 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.714284 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.716445 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qw8lk"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717248 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717281 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/54599197-2100-442c-8dbb-f536092eb76a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717305 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-config\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717321 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717338 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-client\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717352 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbqnk\" (UniqueName: \"kubernetes.io/projected/f706d2ca-98e9-4f2f-849f-8ec937f492a5-kube-api-access-wbqnk\") pod \"dns-operator-744455d44c-lwsz7\" (UID: 
\"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717375 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717389 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxh9g\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-kube-api-access-nxh9g\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717406 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd7hn\" (UniqueName: \"kubernetes.io/projected/68cd346a-3a57-473b-8a06-9e3823688014-kube-api-access-qd7hn\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717422 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717437 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-service-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717451 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-serving-cert\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717464 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717479 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-default-certificate\") pod \"router-default-5444994796-lhdvm\" (UID: 
\"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717493 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717506 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-node-pullsecrets\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717527 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-config\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717541 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-images\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717553 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/40b62828-d0ca-4668-b621-9b963a53be7d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717566 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717579 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-config\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717594 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wchwm\" (UniqueName: \"kubernetes.io/projected/b45028e1-4454-4ec1-ac67-d470a334272c-kube-api-access-wchwm\") pod \"downloads-7954f5f757-qw8lk\" (UID: \"b45028e1-4454-4ec1-ac67-d470a334272c\") " 
pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717606 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717626 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-serving-cert\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717640 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-client\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717652 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40445bd9-9b02-4602-b071-bb4da0da53b6-metrics-tls\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717668 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-serving-cert\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717684 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717697 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9bx7\" (UniqueName: \"kubernetes.io/projected/45ee36e9-1af4-4c81-b1bb-563472694c43-kube-api-access-d9bx7\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717712 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4d08068f-9519-41e4-9361-1810f60abdcf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717727 4583 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-policies\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717771 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-service-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717787 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-client\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717802 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fxws\" (UniqueName: \"kubernetes.io/projected/a4ac6176-8382-4407-93d4-b74e1ec54a30-kube-api-access-5fxws\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717814 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717829 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-dir\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717846 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntg2d\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-kube-api-access-ntg2d\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717859 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717876 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: 
\"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717893 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717909 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-trusted-ca\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717924 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9st9q\" (UniqueName: \"kubernetes.io/projected/8f53465c-6116-44fc-a09c-0a68311aef9d-kube-api-access-9st9q\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717936 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717950 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717962 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717975 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhrpf\" (UniqueName: \"kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.717990 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 
15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718020 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jw8s\" (UniqueName: \"kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718025 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-config\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718035 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzzj2\" (UniqueName: \"kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718075 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-config\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718119 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718137 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718528 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l5vd\" (UniqueName: \"kubernetes.io/projected/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-kube-api-access-7l5vd\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718550 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718583 4583 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718628 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718648 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718663 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718676 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718774 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwt62\" (UniqueName: \"kubernetes.io/projected/4d08068f-9519-41e4-9361-1810f60abdcf-kube-api-access-rwt62\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718803 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45ee36e9-1af4-4c81-b1bb-563472694c43-serving-cert\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718819 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5x8p\" (UniqueName: \"kubernetes.io/projected/40b62828-d0ca-4668-b621-9b963a53be7d-kube-api-access-x5x8p\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718834 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-bound-sa-token\") pod 
\"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718849 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-metrics-certs\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718862 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718877 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-encryption-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718891 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit-dir\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.718932 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxklc"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719016 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-encryption-config\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719034 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719050 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/40445bd9-9b02-4602-b071-bb4da0da53b6-trusted-ca\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719066 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719226 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719491 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-policies\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719561 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-trusted-ca\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719572 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4ac6176-8382-4407-93d4-b74e1ec54a30-audit-dir\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719627 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nznn8\" (UniqueName: \"kubernetes.io/projected/54599197-2100-442c-8dbb-f536092eb76a-kube-api-access-nznn8\") pod \"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719656 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719729 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68cd346a-3a57-473b-8a06-9e3823688014-service-ca-bundle\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719808 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719831 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719853 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719857 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719871 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-stats-auth\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719887 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-serving-cert\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719912 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d08068f-9519-41e4-9361-1810f60abdcf-serving-cert\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719968 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-image-import-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.719995 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zqj2\" (UniqueName: \"kubernetes.io/projected/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-kube-api-access-5zqj2\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.720011 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.720028 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f706d2ca-98e9-4f2f-849f-8ec937f492a5-metrics-tls\") pod \"dns-operator-744455d44c-lwsz7\" (UID: \"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.720620 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.721434 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.722051 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-serving-cert\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.722273 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-serving-cert\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.722700 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-encryption-config\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.722776 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4ac6176-8382-4407-93d4-b74e1ec54a30-etcd-client\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.723671 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.724511 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.725318 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.726102 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.726890 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.727679 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.728462 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.729928 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bsppj"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.729942 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.730929 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.731888 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.733110 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-d8gqr"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.733939 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.734211 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-lp2s6"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.735167 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-d8gqr"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.748606 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-jn54c"] Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.749200 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.750044 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.770325 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.789989 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.810066 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820334 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820363 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd7hn\" (UniqueName: \"kubernetes.io/projected/68cd346a-3a57-473b-8a06-9e3823688014-kube-api-access-qd7hn\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820384 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-client\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820400 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbqnk\" (UniqueName: \"kubernetes.io/projected/f706d2ca-98e9-4f2f-849f-8ec937f492a5-kube-api-access-wbqnk\") pod \"dns-operator-744455d44c-lwsz7\" (UID: \"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820414 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820429 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxh9g\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-kube-api-access-nxh9g\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820444 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-service-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820777 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-serving-cert\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820807 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820825 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-node-pullsecrets\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820839 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-default-certificate\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820857 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820881 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wchwm\" (UniqueName: \"kubernetes.io/projected/b45028e1-4454-4ec1-ac67-d470a334272c-kube-api-access-wchwm\") pod \"downloads-7954f5f757-qw8lk\" (UID: \"b45028e1-4454-4ec1-ac67-d470a334272c\") " pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820899 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-config\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820912 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-images\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820927 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/40b62828-d0ca-4668-b621-9b963a53be7d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820944 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820958 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-config\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820972 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.820989 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40445bd9-9b02-4602-b071-bb4da0da53b6-metrics-tls\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821004 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9bx7\" (UniqueName: \"kubernetes.io/projected/45ee36e9-1af4-4c81-b1bb-563472694c43-kube-api-access-d9bx7\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821019 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4d08068f-9519-41e4-9361-1810f60abdcf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821034 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821048 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-service-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821062 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-client\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821081 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntg2d\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-kube-api-access-ntg2d\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821095 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821100 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-node-pullsecrets\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821111 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821142 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821166 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9st9q\" (UniqueName: \"kubernetes.io/projected/8f53465c-6116-44fc-a09c-0a68311aef9d-kube-api-access-9st9q\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821183 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " 
pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821201 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhrpf\" (UniqueName: \"kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821217 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821232 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821247 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jw8s\" (UniqueName: \"kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821261 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821294 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzzj2\" (UniqueName: \"kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821308 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-config\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821323 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821344 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821361 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821374 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821377 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-service-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821388 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821402 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821416 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45ee36e9-1af4-4c81-b1bb-563472694c43-serving-cert\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821419 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821431 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwt62\" (UniqueName: 
\"kubernetes.io/projected/4d08068f-9519-41e4-9361-1810f60abdcf-kube-api-access-rwt62\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821436 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821473 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5x8p\" (UniqueName: \"kubernetes.io/projected/40b62828-d0ca-4668-b621-9b963a53be7d-kube-api-access-x5x8p\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821503 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821519 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-metrics-certs\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821535 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821548 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-encryption-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821562 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit-dir\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821588 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821603 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/40445bd9-9b02-4602-b071-bb4da0da53b6-trusted-ca\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821619 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821643 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nznn8\" (UniqueName: \"kubernetes.io/projected/54599197-2100-442c-8dbb-f536092eb76a-kube-api-access-nznn8\") pod \"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821657 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821672 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68cd346a-3a57-473b-8a06-9e3823688014-service-ca-bundle\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821686 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821700 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821715 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 
15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821729 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-stats-auth\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821737 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821781 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-serving-cert\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821811 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d08068f-9519-41e4-9361-1810f60abdcf-serving-cert\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821830 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f706d2ca-98e9-4f2f-849f-8ec937f492a5-metrics-tls\") pod \"dns-operator-744455d44c-lwsz7\" (UID: \"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821845 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-image-import-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821860 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zqj2\" (UniqueName: \"kubernetes.io/projected/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-kube-api-access-5zqj2\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821873 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821889 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/54599197-2100-442c-8dbb-f536092eb76a-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821905 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.821974 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823371 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-serving-cert\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823557 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823616 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823656 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-default-certificate\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823691 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-serving-cert\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823799 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-client\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823852 4583 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823915 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.823941 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-service-ca\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824171 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824176 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824201 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45ee36e9-1af4-4c81-b1bb-563472694c43-config\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824382 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-images\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824491 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.824880 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825013 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825160 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-etcd-serving-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825410 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4d08068f-9519-41e4-9361-1810f60abdcf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825483 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-image-import-ca\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825564 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-audit-dir\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825660 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826063 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f53465c-6116-44fc-a09c-0a68311aef9d-config\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826182 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826271 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig\") pod 
\"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826408 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68cd346a-3a57-473b-8a06-9e3823688014-service-ca-bundle\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826416 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826441 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.825485 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40b62828-d0ca-4668-b621-9b963a53be7d-config\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.826704 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827023 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827230 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827240 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-stats-auth\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827737 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/68cd346a-3a57-473b-8a06-9e3823688014-metrics-certs\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827801 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.827815 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828184 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/40b62828-d0ca-4668-b621-9b963a53be7d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828525 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d08068f-9519-41e4-9361-1810f60abdcf-serving-cert\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828554 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828559 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828555 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.828917 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template\") 
pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.829013 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45ee36e9-1af4-4c81-b1bb-563472694c43-serving-cert\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.829206 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-encryption-config\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.829309 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/54599197-2100-442c-8dbb-f536092eb76a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.839406 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f53465c-6116-44fc-a09c-0a68311aef9d-etcd-client\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.850128 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.870496 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.890352 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.911080 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.930724 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.937722 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f706d2ca-98e9-4f2f-849f-8ec937f492a5-metrics-tls\") pod \"dns-operator-744455d44c-lwsz7\" (UID: \"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.950159 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.970326 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 09 15:00:35 crc kubenswrapper[4583]: I1209 15:00:35.989988 4583 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.010726 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.029916 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.037399 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40445bd9-9b02-4602-b071-bb4da0da53b6-metrics-tls\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.055153 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.064120 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/40445bd9-9b02-4602-b071-bb4da0da53b6-trusted-ca\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.070687 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.110218 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.130494 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.150540 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.170651 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.190068 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.210328 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.229785 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.250707 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.270788 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.290991 4583 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.310551 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.330901 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.349938 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.370516 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.390711 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.410010 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.430122 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.451007 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.470421 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.491011 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.510918 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.530163 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.550398 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.570684 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.589992 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.610791 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.630124 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 
15:00:36.649960 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.670098 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.689497 4583 request.go:700] Waited for 1.013768874s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpackageserver-service-cert&limit=500&resourceVersion=0 Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.690414 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.709772 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.730033 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.750896 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.774320 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.790596 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.809975 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.830697 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.850646 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.870540 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.890478 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.910577 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.930679 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.949867 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 09 15:00:36 crc kubenswrapper[4583]: I1209 15:00:36.970598 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 09 15:00:36 crc 
kubenswrapper[4583]: I1209 15:00:36.989906 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.010074 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.030621 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.050710 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.070438 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.090699 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.109985 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.130718 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.155091 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.170497 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.190619 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.210492 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.230711 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.250097 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.270829 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.289908 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.310164 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.330422 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.350680 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 
15:00:37.370000 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.390805 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.410276 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.430541 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.451144 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.482177 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l5vd\" (UniqueName: \"kubernetes.io/projected/c11b79e8-8b8c-45a3-8ab4-53be0b141ea5-kube-api-access-7l5vd\") pod \"console-operator-58897d9998-9k4b2\" (UID: \"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5\") " pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.501113 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fxws\" (UniqueName: \"kubernetes.io/projected/a4ac6176-8382-4407-93d4-b74e1ec54a30-kube-api-access-5fxws\") pod \"apiserver-7bbb656c7d-hx7gd\" (UID: \"a4ac6176-8382-4407-93d4-b74e1ec54a30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.509828 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.531226 4583 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.550850 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.570135 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.590343 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.609871 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.638546 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.641476 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbqnk\" (UniqueName: \"kubernetes.io/projected/f706d2ca-98e9-4f2f-849f-8ec937f492a5-kube-api-access-wbqnk\") pod \"dns-operator-744455d44c-lwsz7\" (UID: \"f706d2ca-98e9-4f2f-849f-8ec937f492a5\") " pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.643339 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.661942 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxh9g\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-kube-api-access-nxh9g\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.682076 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwt62\" (UniqueName: \"kubernetes.io/projected/4d08068f-9519-41e4-9361-1810f60abdcf-kube-api-access-rwt62\") pod \"openshift-config-operator-7777fb866f-ls45p\" (UID: \"4d08068f-9519-41e4-9361-1810f60abdcf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.689998 4583 request.go:700] Waited for 1.867340564s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/serviceaccounts/router/token Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.702948 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd7hn\" (UniqueName: \"kubernetes.io/projected/68cd346a-3a57-473b-8a06-9e3823688014-kube-api-access-qd7hn\") pod \"router-default-5444994796-lhdvm\" (UID: \"68cd346a-3a57-473b-8a06-9e3823688014\") " pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.732978 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5x8p\" (UniqueName: \"kubernetes.io/projected/40b62828-d0ca-4668-b621-9b963a53be7d-kube-api-access-x5x8p\") pod \"machine-api-operator-5694c8668f-wjg5r\" (UID: \"40b62828-d0ca-4668-b621-9b963a53be7d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.741876 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24b1196a-b6e9-4094-8a7d-93b9a8e7ede0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-f6n4h\" (UID: \"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.758345 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd"] Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.764830 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9st9q\" (UniqueName: \"kubernetes.io/projected/8f53465c-6116-44fc-a09c-0a68311aef9d-kube-api-access-9st9q\") pod \"etcd-operator-b45778765-m7rq7\" (UID: \"8f53465c-6116-44fc-a09c-0a68311aef9d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.772842 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9k4b2"] Dec 09 15:00:37 crc kubenswrapper[4583]: W1209 15:00:37.777110 4583 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc11b79e8_8b8c_45a3_8ab4_53be0b141ea5.slice/crio-262afe1cc5bdb75e6f6e026f2ed2c7ef69e732382506d9e5145c26e5a0e6f1ed WatchSource:0}: Error finding container 262afe1cc5bdb75e6f6e026f2ed2c7ef69e732382506d9e5145c26e5a0e6f1ed: Status 404 returned error can't find the container with id 262afe1cc5bdb75e6f6e026f2ed2c7ef69e732382506d9e5145c26e5a0e6f1ed Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.778086 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.781054 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhrpf\" (UniqueName: \"kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf\") pod \"route-controller-manager-6576b87f9c-vqh9c\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.787206 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.792927 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.796616 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.802362 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wchwm\" (UniqueName: \"kubernetes.io/projected/b45028e1-4454-4ec1-ac67-d470a334272c-kube-api-access-wchwm\") pod \"downloads-7954f5f757-qw8lk\" (UID: \"b45028e1-4454-4ec1-ac67-d470a334272c\") " pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.802730 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.808523 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.824665 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9bx7\" (UniqueName: \"kubernetes.io/projected/45ee36e9-1af4-4c81-b1bb-563472694c43-kube-api-access-d9bx7\") pod \"authentication-operator-69f744f599-mh54x\" (UID: \"45ee36e9-1af4-4c81-b1bb-563472694c43\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.846183 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.866607 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntg2d\" (UniqueName: \"kubernetes.io/projected/40445bd9-9b02-4602-b071-bb4da0da53b6-kube-api-access-ntg2d\") pod \"ingress-operator-5b745b69d9-kxl9t\" (UID: \"40445bd9-9b02-4602-b071-bb4da0da53b6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.883578 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nznn8\" (UniqueName: \"kubernetes.io/projected/54599197-2100-442c-8dbb-f536092eb76a-kube-api-access-nznn8\") pod \"cluster-samples-operator-665b6dd947-lds5j\" (UID: \"54599197-2100-442c-8dbb-f536092eb76a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.902694 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jw8s\" (UniqueName: \"kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s\") pod \"oauth-openshift-558db77b4-phvcn\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.922458 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zqj2\" (UniqueName: \"kubernetes.io/projected/6baa2305-cdf4-4911-b91c-e26ee8ab9f64-kube-api-access-5zqj2\") pod \"apiserver-76f77b778f-zzls8\" (UID: \"6baa2305-cdf4-4911-b91c-e26ee8ab9f64\") " pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.945361 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzzj2\" (UniqueName: \"kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2\") pod \"controller-manager-879f6c89f-tvq7j\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.961548 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.968181 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" Dec 09 15:00:37 crc kubenswrapper[4583]: I1209 15:00:37.989290 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.001828 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.025417 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043367 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-machine-approver-tls\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043409 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043431 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043453 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043473 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043526 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043543 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-86mdz\" (UniqueName: \"kubernetes.io/projected/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-kube-api-access-86mdz\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043558 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043596 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043670 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45kns\" (UniqueName: \"kubernetes.io/projected/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-kube-api-access-45kns\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043694 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043787 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-auth-proxy-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043817 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-575qd\" (UniqueName: \"kubernetes.io/projected/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-kube-api-access-575qd\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043867 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69l7c\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043886 
4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043903 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043921 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.043965 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.044181 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.544169915 +0000 UTC m=+136.751619003 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.056545 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.070191 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.088594 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.096631 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16821b35_4f2e_4733_a44c_3e3a8caaf98a.slice/crio-26fd81b1f6bdf80a69806d25d0c6be9b66dd2288d7b6378bdddd6b9c5e5c069d WatchSource:0}: Error finding container 26fd81b1f6bdf80a69806d25d0c6be9b66dd2288d7b6378bdddd6b9c5e5c069d: Status 404 returned error can't find the container with id 26fd81b1f6bdf80a69806d25d0c6be9b66dd2288d7b6378bdddd6b9c5e5c069d Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.112295 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wjg5r"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.114992 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.132265 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40b62828_d0ca_4668_b621_9b963a53be7d.slice/crio-6305c3292fb3160c058025a43f4e3379f949d9a9fa2eb6fce02028558b793462 WatchSource:0}: Error finding container 6305c3292fb3160c058025a43f4e3379f949d9a9fa2eb6fce02028558b793462: Status 404 returned error can't find the container with id 6305c3292fb3160c058025a43f4e3379f949d9a9fa2eb6fce02028558b793462 Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.144876 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145039 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf8b18df-6138-4713-ba81-d0e67d9ba636-cert\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145082 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s84gj\" (UniqueName: \"kubernetes.io/projected/bcb77eca-2637-4767-9144-25f235e20787-kube-api-access-s84gj\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145098 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145114 4583 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-plugins-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145138 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbbx4\" (UniqueName: \"kubernetes.io/projected/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-kube-api-access-zbbx4\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145163 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-575qd\" (UniqueName: \"kubernetes.io/projected/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-kube-api-access-575qd\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145180 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7867e90f-b983-4b90-a4de-3615ceeed726-proxy-tls\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145203 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf6tk\" (UniqueName: \"kubernetes.io/projected/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-kube-api-access-rf6tk\") pod \"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145217 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145233 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfws4\" (UniqueName: \"kubernetes.io/projected/1ad4fa22-4664-425f-8a79-e15db99ff216-kube-api-access-wfws4\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145260 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145287 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e34d8df-b4af-4781-a1c9-10de3c2a8717-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145303 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa21087-4a5b-4458-b094-558ce575aa7a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145316 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-config\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145359 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145374 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b322636f-a692-4861-aad1-de2bee7eceee-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145405 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145417 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b322636f-a692-4861-aad1-de2bee7eceee-config\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145431 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-serving-cert\") pod 
\"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145456 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-machine-approver-tls\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145477 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-mountpoint-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145492 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxmp\" (UniqueName: \"kubernetes.io/projected/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-kube-api-access-ppxmp\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145508 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145521 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4gp7\" (UniqueName: \"kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145535 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145548 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9lhl\" (UniqueName: \"kubernetes.io/projected/bf8b18df-6138-4713-ba81-d0e67d9ba636-kube-api-access-w9lhl\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145563 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmhss\" (UniqueName: \"kubernetes.io/projected/5e34d8df-b4af-4781-a1c9-10de3c2a8717-kube-api-access-kmhss\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145587 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145600 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cfa21087-4a5b-4458-b094-558ce575aa7a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145618 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a270cd6-ec60-4090-bc24-b93bf929a664-config\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145639 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145670 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145686 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmp95\" (UniqueName: \"kubernetes.io/projected/62060a19-8a42-42fc-acba-effc112e736b-kube-api-access-tmp95\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145700 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/62060a19-8a42-42fc-acba-effc112e736b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145718 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm8wt\" (UniqueName: \"kubernetes.io/projected/2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341-kube-api-access-fm8wt\") pod \"migrator-59844c95c7-vt9jk\" (UID: 
\"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145733 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76rxq\" (UniqueName: \"kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145784 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145803 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45kns\" (UniqueName: \"kubernetes.io/projected/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-kube-api-access-45kns\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145818 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-profile-collector-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145832 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-config-volume\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145845 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145854 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crp48\" (UniqueName: \"kubernetes.io/projected/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-kube-api-access-crp48\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.145923 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.645910135 +0000 UTC m=+136.853359223 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145968 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.145987 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae7e8a7b-c39c-4a3e-a017-232a176966c7-tmpfs\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146005 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcxvl\" (UniqueName: \"kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146024 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b322636f-a692-4861-aad1-de2bee7eceee-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146039 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-cabundle\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146065 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-257zk\" (UniqueName: \"kubernetes.io/projected/49ad48a0-5ad4-4187-a4eb-022a38173868-kube-api-access-257zk\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146083 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-auth-proxy-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 
15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146108 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa21087-4a5b-4458-b094-558ce575aa7a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146133 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-srv-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146160 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69l7c\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146175 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146192 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt2hd\" (UniqueName: \"kubernetes.io/projected/468b0644-ec7c-4009-8d15-c3318642027c-kube-api-access-jt2hd\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146206 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-webhook-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146221 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g75tg\" (UniqueName: \"kubernetes.io/projected/ae7e8a7b-c39c-4a3e-a017-232a176966c7-kube-api-access-g75tg\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146245 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 
15:00:38.146277 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-registration-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146291 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146308 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146332 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146347 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-node-bootstrap-token\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146360 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-certs\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146378 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146430 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146446 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7wg8d\" (UniqueName: \"kubernetes.io/projected/5811a3b1-0a2a-453d-834d-7da03848939f-kube-api-access-7wg8d\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146461 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a270cd6-ec60-4090-bc24-b93bf929a664-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146484 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bcb77eca-2637-4767-9144-25f235e20787-proxy-tls\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146513 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-csi-data-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146549 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146590 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-srv-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146607 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146632 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146654 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmmmx\" (UniqueName: 
\"kubernetes.io/projected/7867e90f-b983-4b90-a4de-3615ceeed726-kube-api-access-zmmmx\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146668 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146696 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146711 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a270cd6-ec60-4090-bc24-b93bf929a664-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146734 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7867e90f-b983-4b90-a4de-3615ceeed726-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146796 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-metrics-tls\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146816 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146832 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86mdz\" (UniqueName: \"kubernetes.io/projected/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-kube-api-access-86mdz\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146846 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-images\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146861 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fl8l\" (UniqueName: \"kubernetes.io/projected/9ba6bb0e-62bc-40d8-ae31-13c35837646b-kube-api-access-5fl8l\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146877 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146905 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-key\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146940 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-auth-proxy-config\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146953 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-socket-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.146986 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.148085 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.149257 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-machine-approver-tls\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.150178 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-auth-proxy-config\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.153969 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m7rq7"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.153192 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.154651 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.155222 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.155519 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.655502312 +0000 UTC m=+136.862951400 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.155714 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.156348 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.158279 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.158481 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.160037 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.162536 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.165328 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f53465c_6116_44fc_a09c_0a68311aef9d.slice/crio-5378c8a6d86c23cf65ba03478ddf37991b5ccbb808fa2dd91819d0509e5b4710 WatchSource:0}: Error finding container 5378c8a6d86c23cf65ba03478ddf37991b5ccbb808fa2dd91819d0509e5b4710: Status 404 returned error can't find the container with id 5378c8a6d86c23cf65ba03478ddf37991b5ccbb808fa2dd91819d0509e5b4710 Dec 09 15:00:38 crc 
kubenswrapper[4583]: I1209 15:00:38.177873 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qw8lk"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.178733 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lwsz7"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.184217 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.192978 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ls45p"] Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.194424 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb45028e1_4454_4ec1_ac67_d470a334272c.slice/crio-8ff63b2efa2367374af39deaeb15c465c1e97c9b667feeead89d48a1d87751dd WatchSource:0}: Error finding container 8ff63b2efa2367374af39deaeb15c465c1e97c9b667feeead89d48a1d87751dd: Status 404 returned error can't find the container with id 8ff63b2efa2367374af39deaeb15c465c1e97c9b667feeead89d48a1d87751dd Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.206483 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-575qd\" (UniqueName: \"kubernetes.io/projected/77d85dfc-5133-4dce-a273-1e7e5dd5b5af-kube-api-access-575qd\") pod \"openshift-apiserver-operator-796bbdcf4f-z8xkg\" (UID: \"77d85dfc-5133-4dce-a273-1e7e5dd5b5af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.213419 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mh54x"] Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.220737 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d08068f_9519_41e4_9361_1810f60abdcf.slice/crio-48f18b3f0e11825763ab67cd1f85bc199d1acf2f01325af02477ed9b5064eede WatchSource:0}: Error finding container 48f18b3f0e11825763ab67cd1f85bc199d1acf2f01325af02477ed9b5064eede: Status 404 returned error can't find the container with id 48f18b3f0e11825763ab67cd1f85bc199d1acf2f01325af02477ed9b5064eede Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.223954 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45kns\" (UniqueName: \"kubernetes.io/projected/c9c90e5a-4004-4215-a1b3-dc2ab3c66627-kube-api-access-45kns\") pod \"openshift-controller-manager-operator-756b6f6bc6-h9xm6\" (UID: \"c9c90e5a-4004-4215-a1b3-dc2ab3c66627\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.247492 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69l7c\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 
15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.247812 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248056 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248078 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-certs\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248092 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-node-bootstrap-token\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248121 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wg8d\" (UniqueName: \"kubernetes.io/projected/5811a3b1-0a2a-453d-834d-7da03848939f-kube-api-access-7wg8d\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248136 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a270cd6-ec60-4090-bc24-b93bf929a664-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248149 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248162 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bcb77eca-2637-4767-9144-25f235e20787-proxy-tls\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248176 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-csi-data-dir\") pod 
\"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248192 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-srv-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248209 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248223 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmmmx\" (UniqueName: \"kubernetes.io/projected/7867e90f-b983-4b90-a4de-3615ceeed726-kube-api-access-zmmmx\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248337 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248374 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a270cd6-ec60-4090-bc24-b93bf929a664-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248391 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248409 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7867e90f-b983-4b90-a4de-3615ceeed726-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248422 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-metrics-tls\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " 
pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248442 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-images\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248456 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fl8l\" (UniqueName: \"kubernetes.io/projected/9ba6bb0e-62bc-40d8-ae31-13c35837646b-kube-api-access-5fl8l\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248469 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-key\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248483 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-auth-proxy-config\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248499 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-socket-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248511 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf8b18df-6138-4713-ba81-d0e67d9ba636-cert\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248525 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s84gj\" (UniqueName: \"kubernetes.io/projected/bcb77eca-2637-4767-9144-25f235e20787-kube-api-access-s84gj\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248546 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-plugins-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248560 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-webhook-certs\") pod 
\"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248575 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbbx4\" (UniqueName: \"kubernetes.io/projected/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-kube-api-access-zbbx4\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248588 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7867e90f-b983-4b90-a4de-3615ceeed726-proxy-tls\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248602 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248693 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf6tk\" (UniqueName: \"kubernetes.io/projected/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-kube-api-access-rf6tk\") pod \"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248707 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfws4\" (UniqueName: \"kubernetes.io/projected/1ad4fa22-4664-425f-8a79-e15db99ff216-kube-api-access-wfws4\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248726 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e34d8df-b4af-4781-a1c9-10de3c2a8717-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248788 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa21087-4a5b-4458-b094-558ce575aa7a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248805 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-config\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248827 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248842 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b322636f-a692-4861-aad1-de2bee7eceee-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248861 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248875 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b322636f-a692-4861-aad1-de2bee7eceee-config\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248887 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-serving-cert\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248901 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-mountpoint-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248915 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxmp\" (UniqueName: \"kubernetes.io/projected/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-kube-api-access-ppxmp\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248930 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4gp7\" (UniqueName: \"kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7\") pod \"console-f9d7485db-n85fv\" (UID: 
\"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248944 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9lhl\" (UniqueName: \"kubernetes.io/projected/bf8b18df-6138-4713-ba81-d0e67d9ba636-kube-api-access-w9lhl\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248960 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.248974 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmhss\" (UniqueName: \"kubernetes.io/projected/5e34d8df-b4af-4781-a1c9-10de3c2a8717-kube-api-access-kmhss\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.249002 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cfa21087-4a5b-4458-b094-558ce575aa7a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.249016 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a270cd6-ec60-4090-bc24-b93bf929a664-config\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.249029 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.249043 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.249057 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmp95\" (UniqueName: \"kubernetes.io/projected/62060a19-8a42-42fc-acba-effc112e736b-kube-api-access-tmp95\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.250134 4583 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-config\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.250359 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.750338442 +0000 UTC m=+136.957787530 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252139 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-socket-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.251707 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252166 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm8wt\" (UniqueName: \"kubernetes.io/projected/2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341-kube-api-access-fm8wt\") pod \"migrator-59844c95c7-vt9jk\" (UID: \"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.251809 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a270cd6-ec60-4090-bc24-b93bf929a664-config\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252196 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/62060a19-8a42-42fc-acba-effc112e736b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252221 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76rxq\" (UniqueName: \"kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq\") pod 
\"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252246 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-profile-collector-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252251 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b322636f-a692-4861-aad1-de2bee7eceee-config\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252263 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-config-volume\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252287 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crp48\" (UniqueName: \"kubernetes.io/projected/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-kube-api-access-crp48\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252302 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252324 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae7e8a7b-c39c-4a3e-a017-232a176966c7-tmpfs\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252342 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcxvl\" (UniqueName: \"kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252359 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b322636f-a692-4861-aad1-de2bee7eceee-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc 
kubenswrapper[4583]: I1209 15:00:38.252376 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-cabundle\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252392 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-257zk\" (UniqueName: \"kubernetes.io/projected/49ad48a0-5ad4-4187-a4eb-022a38173868-kube-api-access-257zk\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252425 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa21087-4a5b-4458-b094-558ce575aa7a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252444 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-srv-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252461 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt2hd\" (UniqueName: \"kubernetes.io/projected/468b0644-ec7c-4009-8d15-c3318642027c-kube-api-access-jt2hd\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252475 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-webhook-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252488 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g75tg\" (UniqueName: \"kubernetes.io/projected/ae7e8a7b-c39c-4a3e-a017-232a176966c7-kube-api-access-g75tg\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252510 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-registration-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252529 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252545 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.251230 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.252019 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-auth-proxy-config\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.253894 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-plugins-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.253934 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.254202 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.254276 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.254592 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-node-bootstrap-token\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.254983 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" 
(UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-mountpoint-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.255767 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7867e90f-b983-4b90-a4de-3615ceeed726-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.256174 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bcb77eca-2637-4767-9144-25f235e20787-images\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.256406 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.256485 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45ee36e9_1af4_4c81_b1bb_563472694c43.slice/crio-67f86476b4a4b821170acc9bb78e38428f8fe5890ee0a0f065bc279ac84de483 WatchSource:0}: Error finding container 67f86476b4a4b821170acc9bb78e38428f8fe5890ee0a0f065bc279ac84de483: Status 404 returned error can't find the container with id 67f86476b4a4b821170acc9bb78e38428f8fe5890ee0a0f065bc279ac84de483 Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.256819 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae7e8a7b-c39c-4a3e-a017-232a176966c7-tmpfs\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.257332 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-csi-data-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.257409 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-config-volume\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.258375 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf8b18df-6138-4713-ba81-d0e67d9ba636-cert\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 
15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.259167 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5811a3b1-0a2a-453d-834d-7da03848939f-registration-dir\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.259908 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e34d8df-b4af-4781-a1c9-10de3c2a8717-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.262732 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.265793 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa21087-4a5b-4458-b094-558ce575aa7a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.266049 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-cabundle\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.266482 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.266665 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-metrics-tls\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.267537 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa21087-4a5b-4458-b094-558ce575aa7a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.267586 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/b322636f-a692-4861-aad1-de2bee7eceee-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.268501 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ba6bb0e-62bc-40d8-ae31-13c35837646b-signing-key\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.269390 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-serving-cert\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.270204 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7867e90f-b983-4b90-a4de-3615ceeed726-proxy-tls\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.272428 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-srv-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.272830 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/468b0644-ec7c-4009-8d15-c3318642027c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.274155 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86mdz\" (UniqueName: \"kubernetes.io/projected/119515af-4e2c-4e58-a7bc-9725c7c2d9d8-kube-api-access-86mdz\") pod \"machine-approver-56656f9798-wpqrh\" (UID: \"119515af-4e2c-4e58-a7bc-9725c7c2d9d8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.276298 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.276466 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.280738 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/62060a19-8a42-42fc-acba-effc112e736b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.284834 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.285449 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/49ad48a0-5ad4-4187-a4eb-022a38173868-certs\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.285530 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bcb77eca-2637-4767-9144-25f235e20787-proxy-tls\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.285826 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.286795 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.287123 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae7e8a7b-c39c-4a3e-a017-232a176966c7-webhook-cert\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.287249 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-profile-collector-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.287614 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a270cd6-ec60-4090-bc24-b93bf929a664-serving-cert\") pod 
\"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.288227 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad4fa22-4664-425f-8a79-e15db99ff216-srv-cert\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.288397 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.291083 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.293178 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.306113 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fl8l\" (UniqueName: \"kubernetes.io/projected/9ba6bb0e-62bc-40d8-ae31-13c35837646b-kube-api-access-5fl8l\") pod \"service-ca-9c57cc56f-kxklc\" (UID: \"9ba6bb0e-62bc-40d8-ae31-13c35837646b\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.310985 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t"] Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.320208 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40445bd9_9b02_4602_b071_bb4da0da53b6.slice/crio-ca1d67d7a1d18caea96d421a2fc029bebdcf9f3e08e1fe8e99ef500949e320dd WatchSource:0}: Error finding container ca1d67d7a1d18caea96d421a2fc029bebdcf9f3e08e1fe8e99ef500949e320dd: Status 404 returned error can't find the container with id ca1d67d7a1d18caea96d421a2fc029bebdcf9f3e08e1fe8e99ef500949e320dd Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.323121 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wg8d\" (UniqueName: \"kubernetes.io/projected/5811a3b1-0a2a-453d-834d-7da03848939f-kube-api-access-7wg8d\") pod \"csi-hostpathplugin-d8gqr\" (UID: \"5811a3b1-0a2a-453d-834d-7da03848939f\") " pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.341029 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.344233 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a270cd6-ec60-4090-bc24-b93bf929a664-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8v28b\" (UID: \"2a270cd6-ec60-4090-bc24-b93bf929a664\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.353811 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.354395 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.854383705 +0000 UTC m=+137.061832783 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.364822 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4gp7\" (UniqueName: \"kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7\") pod \"console-f9d7485db-n85fv\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.371326 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod119515af_4e2c_4e58_a7bc_9725c7c2d9d8.slice/crio-5013453c68df72b0d8d0db12c3719553a2dcde52a966648f2e6be99a0ea83ea4 WatchSource:0}: Error finding container 5013453c68df72b0d8d0db12c3719553a2dcde52a966648f2e6be99a0ea83ea4: Status 404 returned error can't find the container with id 5013453c68df72b0d8d0db12c3719553a2dcde52a966648f2e6be99a0ea83ea4 Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.381721 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zzls8"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.394382 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmp95\" (UniqueName: \"kubernetes.io/projected/62060a19-8a42-42fc-acba-effc112e736b-kube-api-access-tmp95\") pod \"package-server-manager-789f6589d5-qdzns\" (UID: \"62060a19-8a42-42fc-acba-effc112e736b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.404162 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 
09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.408317 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9lhl\" (UniqueName: \"kubernetes.io/projected/bf8b18df-6138-4713-ba81-d0e67d9ba636-kube-api-access-w9lhl\") pod \"ingress-canary-lp2s6\" (UID: \"bf8b18df-6138-4713-ba81-d0e67d9ba636\") " pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.420345 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.424583 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbbx4\" (UniqueName: \"kubernetes.io/projected/b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a-kube-api-access-zbbx4\") pod \"kube-storage-version-migrator-operator-b67b599dd-gqxws\" (UID: \"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.431684 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.444239 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b322636f-a692-4861-aad1-de2bee7eceee-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bhwzd\" (UID: \"b322636f-a692-4861-aad1-de2bee7eceee\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.456065 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.456389 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:38.956377513 +0000 UTC m=+137.163826601 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.473658 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.473913 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmhss\" (UniqueName: \"kubernetes.io/projected/5e34d8df-b4af-4781-a1c9-10de3c2a8717-kube-api-access-kmhss\") pod \"control-plane-machine-set-operator-78cbb6b69f-j8gp4\" (UID: \"5e34d8df-b4af-4781-a1c9-10de3c2a8717\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.475231 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.476826 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.481512 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.492473 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.499024 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cfa21087-4a5b-4458-b094-558ce575aa7a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s8d2x\" (UID: \"cfa21087-4a5b-4458-b094-558ce575aa7a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.502452 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.507989 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.517361 4583 generic.go:334] "Generic (PLEG): container finished" podID="4d08068f-9519-41e4-9361-1810f60abdcf" containerID="9f0595bf2dabcf7a849f83c730b012d8dab31e6d8d3b9be73153c324dfd8a072" exitCode=0 Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.517541 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" event={"ID":"4d08068f-9519-41e4-9361-1810f60abdcf","Type":"ContainerDied","Data":"9f0595bf2dabcf7a849f83c730b012d8dab31e6d8d3b9be73153c324dfd8a072"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.517563 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" event={"ID":"4d08068f-9519-41e4-9361-1810f60abdcf","Type":"ContainerStarted","Data":"48f18b3f0e11825763ab67cd1f85bc199d1acf2f01325af02477ed9b5064eede"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.523137 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-lp2s6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.525337 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm8wt\" (UniqueName: \"kubernetes.io/projected/2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341-kube-api-access-fm8wt\") pod \"migrator-59844c95c7-vt9jk\" (UID: \"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.526761 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.530206 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfws4\" (UniqueName: \"kubernetes.io/projected/1ad4fa22-4664-425f-8a79-e15db99ff216-kube-api-access-wfws4\") pod \"catalog-operator-68c6474976-vptkq\" (UID: \"1ad4fa22-4664-425f-8a79-e15db99ff216\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.541763 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s84gj\" (UniqueName: \"kubernetes.io/projected/bcb77eca-2637-4767-9144-25f235e20787-kube-api-access-s84gj\") pod \"machine-config-operator-74547568cd-6j7kj\" (UID: \"bcb77eca-2637-4767-9144-25f235e20787\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.542732 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" event={"ID":"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5","Type":"ContainerStarted","Data":"2e6f793749682aaae311b137d2529bcaab970ba982c321b8028977433d609844"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.542798 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" event={"ID":"c11b79e8-8b8c-45a3-8ab4-53be0b141ea5","Type":"ContainerStarted","Data":"262afe1cc5bdb75e6f6e026f2ed2c7ef69e732382506d9e5145c26e5a0e6f1ed"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.543148 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.545246 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" event={"ID":"40b62828-d0ca-4668-b621-9b963a53be7d","Type":"ContainerStarted","Data":"03fd08e082a7b5e643678506bdf3e539f3bb9d8948b4eea5adabace64656e0ad"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.545273 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" event={"ID":"40b62828-d0ca-4668-b621-9b963a53be7d","Type":"ContainerStarted","Data":"cd75def621b14e57f140847744bfa834e783f71f1d89177d1ec78db0428b85eb"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.545282 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" event={"ID":"40b62828-d0ca-4668-b621-9b963a53be7d","Type":"ContainerStarted","Data":"6305c3292fb3160c058025a43f4e3379f949d9a9fa2eb6fce02028558b793462"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.546806 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" event={"ID":"16821b35-4f2e-4733-a44c-3e3a8caaf98a","Type":"ContainerStarted","Data":"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.546829 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" event={"ID":"16821b35-4f2e-4733-a44c-3e3a8caaf98a","Type":"ContainerStarted","Data":"26fd81b1f6bdf80a69806d25d0c6be9b66dd2288d7b6378bdddd6b9c5e5c069d"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.547245 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.547990 4583 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-tvq7j container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.548017 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.549120 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" event={"ID":"119515af-4e2c-4e58-a7bc-9725c7c2d9d8","Type":"ContainerStarted","Data":"5013453c68df72b0d8d0db12c3719553a2dcde52a966648f2e6be99a0ea83ea4"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.551864 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" event={"ID":"8f53465c-6116-44fc-a09c-0a68311aef9d","Type":"ContainerStarted","Data":"0995edd88b832cac2c3c32dd3de8d70d3308d565f2dc277c7a06e382f9c2733e"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.551886 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" event={"ID":"8f53465c-6116-44fc-a09c-0a68311aef9d","Type":"ContainerStarted","Data":"5378c8a6d86c23cf65ba03478ddf37991b5ccbb808fa2dd91819d0509e5b4710"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.555823 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" event={"ID":"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0","Type":"ContainerStarted","Data":"cc0d0a324daeda19227c94451e0d4da04a9bd842bce5bc1145be29834ba520d2"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.555846 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" event={"ID":"24b1196a-b6e9-4094-8a7d-93b9a8e7ede0","Type":"ContainerStarted","Data":"b56b936ba6fd96fea520fca1d5ae761d9ad7a59275a62bfb4720f97db16a8303"} Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.563579 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77d85dfc_5133_4dce_a273_1e7e5dd5b5af.slice/crio-1dc8561f07d21d84fc4281bbeb9140f389e7e65dd31429f39982773569b6b41a WatchSource:0}: Error finding container 1dc8561f07d21d84fc4281bbeb9140f389e7e65dd31429f39982773569b6b41a: Status 404 returned error can't find the container with id 1dc8561f07d21d84fc4281bbeb9140f389e7e65dd31429f39982773569b6b41a Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.563623 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76rxq\" (UniqueName: \"kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq\") pod \"collect-profiles-29421540-fvs7m\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.564902 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.565196 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.065186334 +0000 UTC m=+137.272635421 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.569543 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" event={"ID":"1e08c725-bc3f-4816-bf4c-a6bdecded47e","Type":"ContainerStarted","Data":"c78972a4bcb0c6c885f3b61cea6593b0f804aea925582f95126e64391373fbfa"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.578078 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.578406 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" event={"ID":"24d1746e-1acb-433b-b1ea-c1d1aa756176","Type":"ContainerStarted","Data":"9e49953d5116bb53f7781f1d67efff40ed63fc00accddda538ec71918787b24c"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.581987 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qw8lk" event={"ID":"b45028e1-4454-4ec1-ac67-d470a334272c","Type":"ContainerStarted","Data":"4d759a2dfc9426189e369313460762026c50d99f6fd86755826e29c74f3f8274"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.582028 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qw8lk" event={"ID":"b45028e1-4454-4ec1-ac67-d470a334272c","Type":"ContainerStarted","Data":"8ff63b2efa2367374af39deaeb15c465c1e97c9b667feeead89d48a1d87751dd"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.582042 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.585203 4583 patch_prober.go:28] interesting pod/downloads-7954f5f757-qw8lk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.585244 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qw8lk" podUID="b45028e1-4454-4ec1-ac67-d470a334272c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.586492 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lhdvm" event={"ID":"68cd346a-3a57-473b-8a06-9e3823688014","Type":"ContainerStarted","Data":"3b050bce0faed1f0cb92b76331331b89a576a6a001950750778ea2b0b3033a43"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.586533 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lhdvm" event={"ID":"68cd346a-3a57-473b-8a06-9e3823688014","Type":"ContainerStarted","Data":"ff7369ce9237ead90b5d54e25d361a168a079c27677c328eadeb2a3ab7856545"} Dec 09 15:00:38 crc kubenswrapper[4583]: 
I1209 15:00:38.589724 4583 generic.go:334] "Generic (PLEG): container finished" podID="a4ac6176-8382-4407-93d4-b74e1ec54a30" containerID="bcace508ce64a67bf6b7fad31dad42bd1382448d991a0df3916d1398d2ae48ab" exitCode=0 Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.589794 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" event={"ID":"a4ac6176-8382-4407-93d4-b74e1ec54a30","Type":"ContainerDied","Data":"bcace508ce64a67bf6b7fad31dad42bd1382448d991a0df3916d1398d2ae48ab"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.589816 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" event={"ID":"a4ac6176-8382-4407-93d4-b74e1ec54a30","Type":"ContainerStarted","Data":"a80f468da5f0251740066a81688eccfb8427901f9346de2f2d663161f16c4f40"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.592032 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" event={"ID":"f706d2ca-98e9-4f2f-849f-8ec937f492a5","Type":"ContainerStarted","Data":"116bf7725ddbbaedd5cf89f86e0cddd1c408d8b2137daedcb0d6f0cfbf093145"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.593378 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf6tk\" (UniqueName: \"kubernetes.io/projected/2d81f678-276b-4f7c-82e2-d8ebc5c91c27-kube-api-access-rf6tk\") pod \"multus-admission-controller-857f4d67dd-kdmx6\" (UID: \"2d81f678-276b-4f7c-82e2-d8ebc5c91c27\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.597229 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" event={"ID":"45ee36e9-1af4-4c81-b1bb-563472694c43","Type":"ContainerStarted","Data":"4b592cb04d9c4d4b817c6cd16cb41dd9a32831ad4172804b3fe4b12acf28ec4d"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.597254 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" event={"ID":"45ee36e9-1af4-4c81-b1bb-563472694c43","Type":"ContainerStarted","Data":"67f86476b4a4b821170acc9bb78e38428f8fe5890ee0a0f065bc279ac84de483"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.606782 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" event={"ID":"6baa2305-cdf4-4911-b91c-e26ee8ab9f64","Type":"ContainerStarted","Data":"fc51854b671ef27376b027810f4f603cfa81b0523c05bf57ac9082594f8d6b09"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.607139 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxmp\" (UniqueName: \"kubernetes.io/projected/a6204cd3-ce19-4ee8-a5e1-85b8ec12d060-kube-api-access-ppxmp\") pod \"service-ca-operator-777779d784-z7qq5\" (UID: \"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.614601 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" event={"ID":"40445bd9-9b02-4602-b071-bb4da0da53b6","Type":"ContainerStarted","Data":"ca1d67d7a1d18caea96d421a2fc029bebdcf9f3e08e1fe8e99ef500949e320dd"} Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.636352 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-257zk\" (UniqueName: \"kubernetes.io/projected/49ad48a0-5ad4-4187-a4eb-022a38173868-kube-api-access-257zk\") pod \"machine-config-server-jn54c\" (UID: \"49ad48a0-5ad4-4187-a4eb-022a38173868\") " pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.651479 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcxvl\" (UniqueName: \"kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl\") pod \"marketplace-operator-79b997595-nfx8b\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.663314 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crp48\" (UniqueName: \"kubernetes.io/projected/f4a5d8ed-6566-47a6-9b04-0fd07e33c942-kube-api-access-crp48\") pod \"dns-default-bsppj\" (UID: \"f4a5d8ed-6566-47a6-9b04-0fd07e33c942\") " pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.666820 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.668853 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.168830756 +0000 UTC m=+137.376279845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.695835 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g75tg\" (UniqueName: \"kubernetes.io/projected/ae7e8a7b-c39c-4a3e-a017-232a176966c7-kube-api-access-g75tg\") pod \"packageserver-d55dfcdfc-p29td\" (UID: \"ae7e8a7b-c39c-4a3e-a017-232a176966c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.730023 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmmmx\" (UniqueName: \"kubernetes.io/projected/7867e90f-b983-4b90-a4de-3615ceeed726-kube-api-access-zmmmx\") pod \"machine-config-controller-84d6567774-hj8bv\" (UID: \"7867e90f-b983-4b90-a4de-3615ceeed726\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.738204 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.741911 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.744300 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.750244 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.759668 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.760148 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt2hd\" (UniqueName: \"kubernetes.io/projected/468b0644-ec7c-4009-8d15-c3318642027c-kube-api-access-jt2hd\") pod \"olm-operator-6b444d44fb-tzgkc\" (UID: \"468b0644-ec7c-4009-8d15-c3318642027c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.760154 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.761662 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxklc"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.766093 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.769253 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.769928 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.770416 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.270403389 +0000 UTC m=+137.477852477 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.776391 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.791681 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.795304 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.796040 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.813033 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.815130 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.818246 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.825467 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.852127 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b"] Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.853316 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.870983 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.871413 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.37139955 +0000 UTC m=+137.578848638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.882190 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jn54c" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.904973 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.907672 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:38 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:38 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:38 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.907719 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:38 crc kubenswrapper[4583]: I1209 15:00:38.972892 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:38 crc kubenswrapper[4583]: E1209 15:00:38.973136 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.473125833 +0000 UTC m=+137.680574921 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:38 crc kubenswrapper[4583]: W1209 15:00:38.988462 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a270cd6_ec60_4090_bc24_b93bf929a664.slice/crio-4ad4c0cc7a533abe2c34209f1353faf6fd16ae8c93ab21f3230197f5ec3c200d WatchSource:0}: Error finding container 4ad4c0cc7a533abe2c34209f1353faf6fd16ae8c93ab21f3230197f5ec3c200d: Status 404 returned error can't find the container with id 4ad4c0cc7a533abe2c34209f1353faf6fd16ae8c93ab21f3230197f5ec3c200d Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.077634 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.078419 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.578403277 +0000 UTC m=+137.785852366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.130811 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-d8gqr"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.176817 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.180260 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.180525 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.680513718 +0000 UTC m=+137.887962795 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: W1209 15:00:39.204769 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5811a3b1_0a2a_453d_834d_7da03848939f.slice/crio-8a47032b6c016c602542354a9fb09f90020142f5d016240f324211d6f012b6a0 WatchSource:0}: Error finding container 8a47032b6c016c602542354a9fb09f90020142f5d016240f324211d6f012b6a0: Status 404 returned error can't find the container with id 8a47032b6c016c602542354a9fb09f90020142f5d016240f324211d6f012b6a0 Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.229518 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-lp2s6"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.250108 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4"] Dec 09 15:00:39 crc kubenswrapper[4583]: W1209 15:00:39.273710 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49ad48a0_5ad4_4187_a4eb_022a38173868.slice/crio-71feb3e16e318b84dd572df8870118893e1f6cccc83d2427fffdb28cd69c3b0d WatchSource:0}: Error finding container 71feb3e16e318b84dd572df8870118893e1f6cccc83d2427fffdb28cd69c3b0d: Status 404 returned error can't find the container with id 71feb3e16e318b84dd572df8870118893e1f6cccc83d2427fffdb28cd69c3b0d Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.281888 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.282172 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.782160139 +0000 UTC m=+137.989609228 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.387937 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.388360 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:39.888347717 +0000 UTC m=+138.095796805 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: W1209 15:00:39.437365 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e34d8df_b4af_4781_a1c9_10de3c2a8717.slice/crio-a0ee5cad127b6d6c0135d5ced8cf29017e8cc067a8091cc577f2660c600fa8a0 WatchSource:0}: Error finding container a0ee5cad127b6d6c0135d5ced8cf29017e8cc067a8091cc577f2660c600fa8a0: Status 404 returned error can't find the container with id a0ee5cad127b6d6c0135d5ced8cf29017e8cc067a8091cc577f2660c600fa8a0 Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.444668 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" podStartSLOduration=117.444651868 podStartE2EDuration="1m57.444651868s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:39.402897389 +0000 UTC m=+137.610346477" watchObservedRunningTime="2025-12-09 15:00:39.444651868 +0000 UTC m=+137.652100956" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.491694 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.492142 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-09 15:00:39.992127518 +0000 UTC m=+138.199576606 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.556727 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.595679 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.596200 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.09618891 +0000 UTC m=+138.303637998 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: W1209 15:00:39.612473 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod468b0644_ec7c_4009_8d15_c3318642027c.slice/crio-da30c21bb7e733f6d4f00c8e2df6d43a1fc17c382f60201a93c47f0e99ec4373 WatchSource:0}: Error finding container da30c21bb7e733f6d4f00c8e2df6d43a1fc17c382f60201a93c47f0e99ec4373: Status 404 returned error can't find the container with id da30c21bb7e733f6d4f00c8e2df6d43a1fc17c382f60201a93c47f0e99ec4373 Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.648070 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" event={"ID":"2a270cd6-ec60-4090-bc24-b93bf929a664","Type":"ContainerStarted","Data":"4ad4c0cc7a533abe2c34209f1353faf6fd16ae8c93ab21f3230197f5ec3c200d"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.692926 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-m7rq7" podStartSLOduration=117.692906782 podStartE2EDuration="1m57.692906782s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:39.649197072 +0000 UTC m=+137.856646159" watchObservedRunningTime="2025-12-09 15:00:39.692906782 +0000 UTC m=+137.900355871" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 
15:00:39.697035 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" event={"ID":"40445bd9-9b02-4602-b071-bb4da0da53b6","Type":"ContainerStarted","Data":"e31e097e7ed2fbabf0fcfd5ea94a6278463e9fdc2e603898dd6ece60515879b4"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.697068 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" event={"ID":"40445bd9-9b02-4602-b071-bb4da0da53b6","Type":"ContainerStarted","Data":"da3f81d94220de97e375fce07b6be5d535d03de47d2cff766e68a85ef9e44e83"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.699717 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.700384 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.200369508 +0000 UTC m=+138.407818597 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.720526 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" event={"ID":"c9c90e5a-4004-4215-a1b3-dc2ab3c66627","Type":"ContainerStarted","Data":"1e9f6499d7920d9f892d0c312b454367b61bc5eecf385e8307e1275fc633e1d1"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.720558 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" event={"ID":"c9c90e5a-4004-4215-a1b3-dc2ab3c66627","Type":"ContainerStarted","Data":"f3a85bdae0c7e1ec97669a5e9fe4e55351feecf39aae06dced60b204ea19e0de"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.732703 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-lhdvm" podStartSLOduration=117.732688928 podStartE2EDuration="1m57.732688928s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:39.690288135 +0000 UTC m=+137.897737223" watchObservedRunningTime="2025-12-09 15:00:39.732688928 +0000 UTC m=+137.940138016" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.777319 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" event={"ID":"4d08068f-9519-41e4-9361-1810f60abdcf","Type":"ContainerStarted","Data":"896cc777887dcadd069810e06b5a7ce8beb5e90e63768414c70c26e63123a712"} Dec 09 15:00:39 
crc kubenswrapper[4583]: I1209 15:00:39.778047 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.801404 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.801713 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.301700553 +0000 UTC m=+138.509149641 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.806934 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:39 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:39 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:39 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.807104 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.817803 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" event={"ID":"a4ac6176-8382-4407-93d4-b74e1ec54a30","Type":"ContainerStarted","Data":"dcba732dd472efe434d265ef9fff9701cec157e4aa434fb3bda5b08667683d3f"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.821023 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-qw8lk" podStartSLOduration=117.821012059 podStartE2EDuration="1m57.821012059s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:39.820010373 +0000 UTC m=+138.027459461" watchObservedRunningTime="2025-12-09 15:00:39.821012059 +0000 UTC m=+138.028461147" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.841370 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-wjg5r" podStartSLOduration=117.841353544 podStartE2EDuration="1m57.841353544s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:39.841265628 +0000 UTC m=+138.048714716" watchObservedRunningTime="2025-12-09 15:00:39.841353544 +0000 UTC m=+138.048802632" Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.846390 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" event={"ID":"f706d2ca-98e9-4f2f-849f-8ec937f492a5","Type":"ContainerStarted","Data":"639b2099d0fd531cd73cc45a9027fa285af447b66c44080710e96432df4f838c"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.846423 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" event={"ID":"f706d2ca-98e9-4f2f-849f-8ec937f492a5","Type":"ContainerStarted","Data":"edbc5643c1ce3c1ac2d54049cb0767ee73a6474c51c654fcfa1c330862288ff6"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.873160 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" event={"ID":"62060a19-8a42-42fc-acba-effc112e736b","Type":"ContainerStarted","Data":"44e0260e22e19dd3771b38448620f035e91102ac0c5ebc4fb552bad1880e35cb"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.873198 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" event={"ID":"62060a19-8a42-42fc-acba-effc112e736b","Type":"ContainerStarted","Data":"a180d999cee41b849c5064a013e0502029cda67d2d173020003f666e9fda8434"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.873355 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kdmx6"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.904720 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:39 crc kubenswrapper[4583]: E1209 15:00:39.905464 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.4054519 +0000 UTC m=+138.612900988 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.907895 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" event={"ID":"54599197-2100-442c-8dbb-f536092eb76a","Type":"ContainerStarted","Data":"592d1e41259d67db6d9c3c601753988075b3f7dc79d70641886e1667bf43fca9"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.907955 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" event={"ID":"54599197-2100-442c-8dbb-f536092eb76a","Type":"ContainerStarted","Data":"7d84a5cc8f9f7fccbbc41e904a685be452732e0209563b700c49425d309e0547"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.907975 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.910026 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.921525 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" event={"ID":"119515af-4e2c-4e58-a7bc-9725c7c2d9d8","Type":"ContainerStarted","Data":"d6189c9873d8e165e04459b69179b791d4161ea100a0d3fdf29f1caaf4ab63bb"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.929777 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" event={"ID":"77d85dfc-5133-4dce-a273-1e7e5dd5b5af","Type":"ContainerStarted","Data":"1ad6fd62f247323e4b414962b51affb75674f393398affcfb1fc4bdc79cf4f40"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.929993 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" event={"ID":"77d85dfc-5133-4dce-a273-1e7e5dd5b5af","Type":"ContainerStarted","Data":"1dc8561f07d21d84fc4281bbeb9140f389e7e65dd31429f39982773569b6b41a"} Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.938790 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.962092 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq"] Dec 09 15:00:39 crc kubenswrapper[4583]: I1209 15:00:39.962129 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jn54c" event={"ID":"49ad48a0-5ad4-4187-a4eb-022a38173868","Type":"ContainerStarted","Data":"71feb3e16e318b84dd572df8870118893e1f6cccc83d2427fffdb28cd69c3b0d"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.051271 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" 
event={"ID":"1e08c725-bc3f-4816-bf4c-a6bdecded47e","Type":"ContainerStarted","Data":"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.065081 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.067358 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.086480 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.58646076 +0000 UTC m=+138.793909848 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.141279 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f6n4h" podStartSLOduration=118.141261125 podStartE2EDuration="1m58.141261125s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.139574191 +0000 UTC m=+138.347023279" watchObservedRunningTime="2025-12-09 15:00:40.141261125 +0000 UTC m=+138.348710212" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.156136 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-n85fv" event={"ID":"63f028f1-f3fc-46f9-9eec-471fa53871ed","Type":"ContainerStarted","Data":"635166240af60ae7df89fa30974c7e83d9d6f01b0bd88c84a0c6f0d3a1fcc63d"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.156274 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" event={"ID":"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a","Type":"ContainerStarted","Data":"7f55cd7eed8ad4eea76fa16088c3e89360be4af10af8ef7a239ed82ab828ffa5"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.156497 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.157370 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" event={"ID":"b9cd3c9b-2fb2-4c54-a5cd-b8aec8ed565a","Type":"ContainerStarted","Data":"61b662281e53740fe5a85d7b7203b6dcfab560440028fa7b3f8bfd0ccef9625e"} Dec 09 
15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.165225 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bsppj"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.166904 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.168654 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.188165 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.188427 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.194130 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.202088 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.202811 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.702793463 +0000 UTC m=+138.910242551 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.208966 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" event={"ID":"24d1746e-1acb-433b-b1ea-c1d1aa756176","Type":"ContainerStarted","Data":"5d2a3f2b1e13243a777aebf844619783399b3cf6642bab4a28fbd2fc03db3911"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.209716 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.211436 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd"] Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.217096 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" event={"ID":"5e34d8df-b4af-4781-a1c9-10de3c2a8717","Type":"ContainerStarted","Data":"a0ee5cad127b6d6c0135d5ced8cf29017e8cc067a8091cc577f2660c600fa8a0"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.234625 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" event={"ID":"9ba6bb0e-62bc-40d8-ae31-13c35837646b","Type":"ContainerStarted","Data":"f7d8d1334fada197878e701a99e6c88b6942c4d3f0076ea7ab6c194cdc7d43a2"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.234665 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" event={"ID":"9ba6bb0e-62bc-40d8-ae31-13c35837646b","Type":"ContainerStarted","Data":"46f45ddf5f19fcbd5c7d5b4f28a4f5492134df36f4c338efec9fd85ccf715845"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.280292 4583 generic.go:334] "Generic (PLEG): container finished" podID="6baa2305-cdf4-4911-b91c-e26ee8ab9f64" containerID="caf5a103026b1eec5ca001f10d3ef67316a4cb29639d3a2db3537657f89132db" exitCode=0 Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.280367 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" event={"ID":"6baa2305-cdf4-4911-b91c-e26ee8ab9f64","Type":"ContainerDied","Data":"caf5a103026b1eec5ca001f10d3ef67316a4cb29639d3a2db3537657f89132db"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.296042 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-lp2s6" event={"ID":"bf8b18df-6138-4713-ba81-d0e67d9ba636","Type":"ContainerStarted","Data":"3bdfca3ddadf12d67cb21a7b35248f4f6feb7b5a80ca2a516ee912b9a32150c3"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.306707 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-mh54x" podStartSLOduration=118.306696967 podStartE2EDuration="1m58.306696967s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-09 15:00:40.305226242 +0000 UTC m=+138.512675330" watchObservedRunningTime="2025-12-09 15:00:40.306696967 +0000 UTC m=+138.514146055" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.307517 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.309356 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.809344148 +0000 UTC m=+139.016793236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.324001 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" event={"ID":"5811a3b1-0a2a-453d-834d-7da03848939f","Type":"ContainerStarted","Data":"8a47032b6c016c602542354a9fb09f90020142f5d016240f324211d6f012b6a0"} Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.346401 4583 patch_prober.go:28] interesting pod/downloads-7954f5f757-qw8lk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.346462 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qw8lk" podUID="b45028e1-4454-4ec1-ac67-d470a334272c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.354929 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.410654 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.412009 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:40.911995512 +0000 UTC m=+139.119444600 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.424354 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-9k4b2" podStartSLOduration=118.42434117 podStartE2EDuration="1m58.42434117s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.366327775 +0000 UTC m=+138.573776863" watchObservedRunningTime="2025-12-09 15:00:40.42434117 +0000 UTC m=+138.631790259" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.515281 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.516377 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.016364457 +0000 UTC m=+139.223813544 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.606037 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" podStartSLOduration=118.606021072 podStartE2EDuration="1m58.606021072s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.6019258 +0000 UTC m=+138.809374888" watchObservedRunningTime="2025-12-09 15:00:40.606021072 +0000 UTC m=+138.813470160" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.616480 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.616541 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.116527881 +0000 UTC m=+139.323976969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.618923 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.619203 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.119193027 +0000 UTC m=+139.326642114 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.632215 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" podStartSLOduration=118.632202702 podStartE2EDuration="1m58.632202702s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.630498006 +0000 UTC m=+138.837947094" watchObservedRunningTime="2025-12-09 15:00:40.632202702 +0000 UTC m=+138.839651791" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.663969 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-kxklc" podStartSLOduration=118.663954838 podStartE2EDuration="1m58.663954838s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.662098183 +0000 UTC m=+138.869547262" watchObservedRunningTime="2025-12-09 15:00:40.663954838 +0000 UTC m=+138.871403925" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.700206 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z8xkg" podStartSLOduration=118.700180671 podStartE2EDuration="1m58.700180671s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.698883706 +0000 UTC m=+138.906332795" watchObservedRunningTime="2025-12-09 15:00:40.700180671 +0000 UTC m=+138.907629760" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.721012 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.721532 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.221519796 +0000 UTC m=+139.428968874 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.769460 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-lwsz7" podStartSLOduration=118.769446307 podStartE2EDuration="1m58.769446307s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.7650045 +0000 UTC m=+138.972453588" watchObservedRunningTime="2025-12-09 15:00:40.769446307 +0000 UTC m=+138.976895396" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.796707 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.798046 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:40 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:40 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:40 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.798109 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.826273 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.826722 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.326711779 +0000 UTC m=+139.534160867 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.872075 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" podStartSLOduration=118.872061763 podStartE2EDuration="1m58.872061763s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.870792412 +0000 UTC m=+139.078241499" watchObservedRunningTime="2025-12-09 15:00:40.872061763 +0000 UTC m=+139.079510851" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.872714 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-kxl9t" podStartSLOduration=118.872709089 podStartE2EDuration="1m58.872709089s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.832558356 +0000 UTC m=+139.040007444" watchObservedRunningTime="2025-12-09 15:00:40.872709089 +0000 UTC m=+139.080158178" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.910040 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-jn54c" podStartSLOduration=5.910029104 podStartE2EDuration="5.910029104s" podCreationTimestamp="2025-12-09 15:00:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.908453261 +0000 UTC m=+139.115902349" watchObservedRunningTime="2025-12-09 15:00:40.910029104 +0000 UTC m=+139.117478192" Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.927699 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:40 crc kubenswrapper[4583]: E1209 15:00:40.928045 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.428033836 +0000 UTC m=+139.635482924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:40 crc kubenswrapper[4583]: I1209 15:00:40.975771 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gqxws" podStartSLOduration=118.975727267 podStartE2EDuration="1m58.975727267s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:40.974511456 +0000 UTC m=+139.181960545" watchObservedRunningTime="2025-12-09 15:00:40.975727267 +0000 UTC m=+139.183176355" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.030763 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.031275 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.531264476 +0000 UTC m=+139.738713564 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.132133 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.132227 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.632208949 +0000 UTC m=+139.839658037 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.132289 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.132595 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.632585092 +0000 UTC m=+139.840034179 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.145519 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" podStartSLOduration=119.145506281 podStartE2EDuration="1m59.145506281s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.143447673 +0000 UTC m=+139.350896762" watchObservedRunningTime="2025-12-09 15:00:41.145506281 +0000 UTC m=+139.352955369" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.233654 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.234190 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.73417656 +0000 UTC m=+139.941625647 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.243694 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-lp2s6" podStartSLOduration=6.243677224 podStartE2EDuration="6.243677224s" podCreationTimestamp="2025-12-09 15:00:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.196510519 +0000 UTC m=+139.403959607" watchObservedRunningTime="2025-12-09 15:00:41.243677224 +0000 UTC m=+139.451126311" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.282249 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" podStartSLOduration=119.282234831 podStartE2EDuration="1m59.282234831s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.243935622 +0000 UTC m=+139.451384710" watchObservedRunningTime="2025-12-09 15:00:41.282234831 +0000 UTC m=+139.489683919" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.283955 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h9xm6" podStartSLOduration=119.283948986 podStartE2EDuration="1m59.283948986s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.28321701 +0000 UTC m=+139.490666099" watchObservedRunningTime="2025-12-09 15:00:41.283948986 +0000 UTC m=+139.491398064" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.336345 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.336656 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.836644466 +0000 UTC m=+140.044093554 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.349891 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" event={"ID":"b322636f-a692-4861-aad1-de2bee7eceee","Type":"ContainerStarted","Data":"3f4443acbbee876031b37200990bb6218becd50122da87f1e58afb95827b554b"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.395104 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-n85fv" event={"ID":"63f028f1-f3fc-46f9-9eec-471fa53871ed","Type":"ContainerStarted","Data":"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.413010 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" event={"ID":"2a270cd6-ec60-4090-bc24-b93bf929a664","Type":"ContainerStarted","Data":"89fd5b6e30a1e58b3074217311b5a2f14b6a3ffc5c3429f925906046192c162b"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.419845 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-lp2s6" event={"ID":"bf8b18df-6138-4713-ba81-d0e67d9ba636","Type":"ContainerStarted","Data":"ded59e1ccd8394cc8c2351e63d2159f27a6093f182bf5ee990e65ccdecddcdfb"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.427852 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" event={"ID":"ae7e8a7b-c39c-4a3e-a017-232a176966c7","Type":"ContainerStarted","Data":"fb9fef42b6e9c847b1160db4540f1246534d0cd51cf18e71950e21ade5038745"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.432033 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" event={"ID":"ae7e8a7b-c39c-4a3e-a017-232a176966c7","Type":"ContainerStarted","Data":"5cc10ce20dc9497c1950321ffc1e15e3f167fa613a9a5749630c9efa5d93c8f4"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.434048 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.435569 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-n85fv" podStartSLOduration=119.435557143 podStartE2EDuration="1m59.435557143s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.434461588 +0000 UTC m=+139.641910677" watchObservedRunningTime="2025-12-09 15:00:41.435557143 +0000 UTC m=+139.643006231" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.438945 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.439402 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:41.939385459 +0000 UTC m=+140.146834547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.447847 4583 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-p29td container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused" start-of-body= Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.447892 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" podUID="ae7e8a7b-c39c-4a3e-a017-232a176966c7" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.448163 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" event={"ID":"6fda0580-6dfb-41d7-b8a1-853b46605cfc","Type":"ContainerStarted","Data":"4987007ca34ed919d1ae78e7d38a908fe42404ed2e3322035c16331b3ea8c633"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.448185 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" event={"ID":"6fda0580-6dfb-41d7-b8a1-853b46605cfc","Type":"ContainerStarted","Data":"7a66e511017cc11d27fd4532d134f64e80e9e80ccd17331ada8732ecc451fdf7"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.481491 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8v28b" podStartSLOduration=119.481477988 podStartE2EDuration="1m59.481477988s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.481175416 +0000 UTC m=+139.688624504" watchObservedRunningTime="2025-12-09 15:00:41.481477988 +0000 UTC m=+139.688927077" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.484231 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" event={"ID":"1ad4fa22-4664-425f-8a79-e15db99ff216","Type":"ContainerStarted","Data":"1e6301f7ad273086e2d09340f879fd361113852d12ce4a054d1131f83f5835c6"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.484273 4583 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" event={"ID":"1ad4fa22-4664-425f-8a79-e15db99ff216","Type":"ContainerStarted","Data":"5f3f3464d056e3ed54fe02ef7998057133bb38631e681e1cc742588f0d5499ab"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.484880 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.488443 4583 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-vptkq container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.488501 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" podUID="1ad4fa22-4664-425f-8a79-e15db99ff216" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.537103 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" podStartSLOduration=119.537090009 podStartE2EDuration="1m59.537090009s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.536241533 +0000 UTC m=+139.743690621" watchObservedRunningTime="2025-12-09 15:00:41.537090009 +0000 UTC m=+139.744539097" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.541873 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.542851 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.042839332 +0000 UTC m=+140.250288420 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.544351 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" event={"ID":"54599197-2100-442c-8dbb-f536092eb76a","Type":"ContainerStarted","Data":"71a3feac54ba891ad6a47aaca509de05d25af190bbcbc46a14935f49eee709cf"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.555966 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jn54c" event={"ID":"49ad48a0-5ad4-4187-a4eb-022a38173868","Type":"ContainerStarted","Data":"ce69fcae857f643345c135e71df0da0d4f4c174fc290b15835e58d6d3ae53167"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.573946 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" event={"ID":"468b0644-ec7c-4009-8d15-c3318642027c","Type":"ContainerStarted","Data":"3475e052ea9c943089c16b98b224573baddf5458c6c2c1b3ae01823b699dee5a"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.574106 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" event={"ID":"468b0644-ec7c-4009-8d15-c3318642027c","Type":"ContainerStarted","Data":"da30c21bb7e733f6d4f00c8e2df6d43a1fc17c382f60201a93c47f0e99ec4373"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.574850 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.576116 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" podStartSLOduration=119.576106345 podStartE2EDuration="1m59.576106345s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.57420707 +0000 UTC m=+139.781656158" watchObservedRunningTime="2025-12-09 15:00:41.576106345 +0000 UTC m=+139.783555433" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.594081 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.600266 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j8gp4" event={"ID":"5e34d8df-b4af-4781-a1c9-10de3c2a8717","Type":"ContainerStarted","Data":"ee33c0024e25920cfd7168267bf49d38910e8b7c0555fafd84d22ae83e830848"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.618500 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" event={"ID":"5811a3b1-0a2a-453d-834d-7da03848939f","Type":"ContainerStarted","Data":"e72d60a412bb3c19a69ce7a50b06fa05bb7bfe92e17f05dd130ee6056a49f4de"} Dec 09 15:00:41 crc 
kubenswrapper[4583]: I1209 15:00:41.640016 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" podStartSLOduration=41.640001937 podStartE2EDuration="41.640001937s" podCreationTimestamp="2025-12-09 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.609438431 +0000 UTC m=+139.816887519" watchObservedRunningTime="2025-12-09 15:00:41.640001937 +0000 UTC m=+139.847451025" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.649971 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" event={"ID":"2d81f678-276b-4f7c-82e2-d8ebc5c91c27","Type":"ContainerStarted","Data":"4a33e206269ba75c18b08082e942582ca35adfd4e4ea028c1d0fa6953adc93c1"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.650008 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" event={"ID":"2d81f678-276b-4f7c-82e2-d8ebc5c91c27","Type":"ContainerStarted","Data":"60b92ab00cf5568c5d092fafbcb84b48a962a21399dea446164f6990150244d3"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.651159 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.651294 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.151281127 +0000 UTC m=+140.358730215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.651514 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.653027 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.153013706 +0000 UTC m=+140.360462795 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.679153 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tzgkc" podStartSLOduration=119.679130044 podStartE2EDuration="1m59.679130044s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.638979641 +0000 UTC m=+139.846428729" watchObservedRunningTime="2025-12-09 15:00:41.679130044 +0000 UTC m=+139.886579132" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.680270 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lds5j" podStartSLOduration=119.680261977 podStartE2EDuration="1m59.680261977s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.678852328 +0000 UTC m=+139.886301416" watchObservedRunningTime="2025-12-09 15:00:41.680261977 +0000 UTC m=+139.887711065" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.717897 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" event={"ID":"62060a19-8a42-42fc-acba-effc112e736b","Type":"ContainerStarted","Data":"aa5f173a87e4f5abaf2af16e67126c965888407cbee97a93b9e7439884a2a796"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.718158 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.728848 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" event={"ID":"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341","Type":"ContainerStarted","Data":"5a6dcdabb9904a0d2e09c461985d9eb654e3bc773d199b0cc4d2aa0d4bd0b76e"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.728878 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" event={"ID":"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341","Type":"ContainerStarted","Data":"c141f8b9634e61de00f131521f920a9f440a719bf45ac7ddcc36c617c3a1094d"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.728889 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" event={"ID":"2c50cfa6-e57c-4fe0-bc5f-b6ba12e1a341","Type":"ContainerStarted","Data":"825ef9f2e3bcec5efb477f1e4cdb5e57a7d14628b07fc35a448e27baa4417c5f"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.745259 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bsppj" 
event={"ID":"f4a5d8ed-6566-47a6-9b04-0fd07e33c942","Type":"ContainerStarted","Data":"c644f47a0d93e0ce19bf4af1809f70299f128818ab778dbe9ab355effd23537e"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.745316 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bsppj" event={"ID":"f4a5d8ed-6566-47a6-9b04-0fd07e33c942","Type":"ContainerStarted","Data":"c885ac9c3fea2e34fdedf760897e69dc98ea208ead412b196b57956ba86674d9"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.746052 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.752145 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.753383 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.253369025 +0000 UTC m=+140.460818113 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.759384 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" event={"ID":"bcb77eca-2637-4767-9144-25f235e20787","Type":"ContainerStarted","Data":"4f2c57ba41a6186b5502a05900351175523a2f69460857a3dd4aa91ba475c691"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.759429 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" event={"ID":"bcb77eca-2637-4767-9144-25f235e20787","Type":"ContainerStarted","Data":"ee52a18c3c55b6330d6d71c0e48352c4b1fbb31dffab0604414e0a963f39b1dd"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.760058 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" podStartSLOduration=119.760043258 podStartE2EDuration="1m59.760043258s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.70127368 +0000 UTC m=+139.908722769" watchObservedRunningTime="2025-12-09 15:00:41.760043258 +0000 UTC m=+139.967492346" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.760675 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" podStartSLOduration=119.760669524 podStartE2EDuration="1m59.760669524s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.759113659 +0000 UTC m=+139.966562747" watchObservedRunningTime="2025-12-09 15:00:41.760669524 +0000 UTC m=+139.968118612" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.774073 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" event={"ID":"cfa21087-4a5b-4458-b094-558ce575aa7a","Type":"ContainerStarted","Data":"a91349272396d522fb40cdf42c249fe481fa95fa4854407b372cbaecf0692a0d"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.774122 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" event={"ID":"cfa21087-4a5b-4458-b094-558ce575aa7a","Type":"ContainerStarted","Data":"1fdf78091df31f4ac896cee779f09ce0eb92dbfaca561295a2cbd89aa16d9294"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.776254 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" event={"ID":"8f0567dd-f660-4e85-bc98-2e3b60b6a34c","Type":"ContainerStarted","Data":"ee4dc382e3a5f4610479338d2283267a9ef3c2c7765d601606b5259aa6386a52"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.776299 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" event={"ID":"8f0567dd-f660-4e85-bc98-2e3b60b6a34c","Type":"ContainerStarted","Data":"6800e34f36cf54d5a1547f3dd2e3d3ac05f3f9a066aa5af2c76955f9c1a828e0"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.776821 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.780860 4583 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nfx8b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.780910 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.782120 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" event={"ID":"7867e90f-b983-4b90-a4de-3615ceeed726","Type":"ContainerStarted","Data":"a3d8dbdaea3bb9e580d10961e9972b528f7598fc2e26c679f2b63f9b5bd5d3bb"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.782154 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" event={"ID":"7867e90f-b983-4b90-a4de-3615ceeed726","Type":"ContainerStarted","Data":"20ea45c3f398bdf20ed3e0df2fd7e39c67b35fbe9dbd633f62e9fd004f781d6f"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.800468 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:41 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:41 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:41 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.800517 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.801717 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-bsppj" podStartSLOduration=6.801705473 podStartE2EDuration="6.801705473s" podCreationTimestamp="2025-12-09 15:00:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.800931619 +0000 UTC m=+140.008380707" watchObservedRunningTime="2025-12-09 15:00:41.801705473 +0000 UTC m=+140.009154551" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.806913 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" event={"ID":"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060","Type":"ContainerStarted","Data":"d1caecf6ad8c1d5bca36d2936fc80dbd15c8eaa4f6558f172cb0d2ebdcfdc02f"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.806948 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" event={"ID":"a6204cd3-ce19-4ee8-a5e1-85b8ec12d060","Type":"ContainerStarted","Data":"7e36e8e234fe85a474451be8cdac49310dabb2016c3234be4a0d7eb5e4883b6c"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.816439 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" event={"ID":"119515af-4e2c-4e58-a7bc-9725c7c2d9d8","Type":"ContainerStarted","Data":"1c6700ed07c08faa189ff5a1fd5b009af21e560febccebf25444213abb5c3cdb"} Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.825442 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vt9jk" podStartSLOduration=119.825432267 podStartE2EDuration="1m59.825432267s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.824075498 +0000 UTC m=+140.031524587" watchObservedRunningTime="2025-12-09 15:00:41.825432267 +0000 UTC m=+140.032881356" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.837482 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ls45p" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.851028 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s8d2x" podStartSLOduration=119.851018571 podStartE2EDuration="1m59.851018571s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.85057995 +0000 UTC m=+140.058029038" watchObservedRunningTime="2025-12-09 
15:00:41.851018571 +0000 UTC m=+140.058467658" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.854299 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.855343 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.355331444 +0000 UTC m=+140.562780532 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.881625 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" podStartSLOduration=119.881609327 podStartE2EDuration="1m59.881609327s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.881289722 +0000 UTC m=+140.088738810" watchObservedRunningTime="2025-12-09 15:00:41.881609327 +0000 UTC m=+140.089058415" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.950287 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" podStartSLOduration=119.950271381 podStartE2EDuration="1m59.950271381s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.948290722 +0000 UTC m=+140.155739810" watchObservedRunningTime="2025-12-09 15:00:41.950271381 +0000 UTC m=+140.157720469" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.950669 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" podStartSLOduration=119.950664816 podStartE2EDuration="1m59.950664816s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:41.914493365 +0000 UTC m=+140.121942453" watchObservedRunningTime="2025-12-09 15:00:41.950664816 +0000 UTC m=+140.158113904" Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.955096 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:41 crc 
kubenswrapper[4583]: E1209 15:00:41.955205 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.455183229 +0000 UTC m=+140.662632317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:41 crc kubenswrapper[4583]: I1209 15:00:41.955475 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:41 crc kubenswrapper[4583]: E1209 15:00:41.959023 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.458995856 +0000 UTC m=+140.666444944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.035268 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z7qq5" podStartSLOduration=120.035251615 podStartE2EDuration="2m0.035251615s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:42.005686399 +0000 UTC m=+140.213135487" watchObservedRunningTime="2025-12-09 15:00:42.035251615 +0000 UTC m=+140.242700703" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.035804 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wpqrh" podStartSLOduration=120.0357991 podStartE2EDuration="2m0.0357991s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:42.033198238 +0000 UTC m=+140.240647326" watchObservedRunningTime="2025-12-09 15:00:42.0357991 +0000 UTC m=+140.243248188" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.058479 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.058987 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.558974942 +0000 UTC m=+140.766424029 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.160286 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.160623 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.660607727 +0000 UTC m=+140.868056815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.261086 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.261262 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.761236944 +0000 UTC m=+140.968686032 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.261777 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.262038 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.762027219 +0000 UTC m=+140.969476307 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.363146 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.363541 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.863528937 +0000 UTC m=+141.070978026 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.464206 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.464514 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:42.964499379 +0000 UTC m=+141.171948466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.565627 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.565822 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.065799796 +0000 UTC m=+141.273248884 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.566116 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.566397 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.066389241 +0000 UTC m=+141.273838319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.611926 4583 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.645681 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.645734 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.658164 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.667097 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.667236 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.167222644 +0000 UTC m=+141.374671731 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.667331 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.667617 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.167609806 +0000 UTC m=+141.375058895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.768628 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.768739 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.268701599 +0000 UTC m=+141.476150687 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.768933 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.769197 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.269185244 +0000 UTC m=+141.476634332 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.796374 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:42 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:42 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:42 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.796419 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.820785 4583 generic.go:334] "Generic (PLEG): container finished" podID="6fda0580-6dfb-41d7-b8a1-853b46605cfc" containerID="4987007ca34ed919d1ae78e7d38a908fe42404ed2e3322035c16331b3ea8c633" exitCode=0 Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.820852 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" event={"ID":"6fda0580-6dfb-41d7-b8a1-853b46605cfc","Type":"ContainerDied","Data":"4987007ca34ed919d1ae78e7d38a908fe42404ed2e3322035c16331b3ea8c633"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.822229 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" event={"ID":"b322636f-a692-4861-aad1-de2bee7eceee","Type":"ContainerStarted","Data":"14764f4e9158e18117ca0ea18535d833efdd0ac24f25efb91e4c205b0f5e2f1d"} Dec 09 15:00:42 crc kubenswrapper[4583]: 
I1209 15:00:42.823887 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kdmx6" event={"ID":"2d81f678-276b-4f7c-82e2-d8ebc5c91c27","Type":"ContainerStarted","Data":"2ca4636718715ec67bd58edea15f464d008807062dcf6861182fa87cefc069a8"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.825791 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" event={"ID":"6baa2305-cdf4-4911-b91c-e26ee8ab9f64","Type":"ContainerStarted","Data":"07d4cfb85a21f71f7f3ceb3e711aed414ab1b00e80f53d718e9618ac78b2066b"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.825823 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" event={"ID":"6baa2305-cdf4-4911-b91c-e26ee8ab9f64","Type":"ContainerStarted","Data":"c4ca80e73e78b257a6beb5d6798cfea62d498a81214731eb39f91db9c0de78cb"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.827209 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bsppj" event={"ID":"f4a5d8ed-6566-47a6-9b04-0fd07e33c942","Type":"ContainerStarted","Data":"2d4a0529ba249d990bb66df60a259ca28558dc3dcdbf4da964e01ba060fd89f4"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.828495 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-hj8bv" event={"ID":"7867e90f-b983-4b90-a4de-3615ceeed726","Type":"ContainerStarted","Data":"29997e1fb944448808ecdcdb99d73d4193af8e7f75d0a4fc01fc8bdcca7e283b"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.830157 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-6j7kj" event={"ID":"bcb77eca-2637-4767-9144-25f235e20787","Type":"ContainerStarted","Data":"e9a5f50981008f9abddc0b3b63e2f99bd5ddec98c92eac6e1f91ca13f828cdb4"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.832163 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" event={"ID":"5811a3b1-0a2a-453d-834d-7da03848939f","Type":"ContainerStarted","Data":"5d56124a43887426b781992d830ab40743fe1efabe92ab13330cbf5db14e6c07"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.832214 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" event={"ID":"5811a3b1-0a2a-453d-834d-7da03848939f","Type":"ContainerStarted","Data":"63be7fceabc4330682f4b1a0c95d0540664d6f9dff6784ed58711b3526d1c85b"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.832226 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" event={"ID":"5811a3b1-0a2a-453d-834d-7da03848939f","Type":"ContainerStarted","Data":"2517fa695407b44d90b508a3cceee588524dbc36f4e538ddf61b3d1a0095d559"} Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.832951 4583 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nfx8b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.833028 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" 
probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.836637 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hx7gd" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.837450 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vptkq" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.868089 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-d8gqr" podStartSLOduration=7.868074877 podStartE2EDuration="7.868074877s" podCreationTimestamp="2025-12-09 15:00:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:42.866889023 +0000 UTC m=+141.074338111" watchObservedRunningTime="2025-12-09 15:00:42.868074877 +0000 UTC m=+141.075523965" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.869679 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.869980 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.369964885 +0000 UTC m=+141.577413973 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.880293 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bhwzd" podStartSLOduration=120.880282856 podStartE2EDuration="2m0.880282856s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:42.879463786 +0000 UTC m=+141.086912874" watchObservedRunningTime="2025-12-09 15:00:42.880282856 +0000 UTC m=+141.087731944" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.898768 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" podStartSLOduration=120.898731508 podStartE2EDuration="2m0.898731508s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:42.895879229 +0000 UTC m=+141.103328317" watchObservedRunningTime="2025-12-09 15:00:42.898731508 +0000 UTC m=+141.106180597" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.972293 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:42 crc kubenswrapper[4583]: E1209 15:00:42.973051 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.473036905 +0000 UTC m=+141.680485994 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.990065 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:42 crc kubenswrapper[4583]: I1209 15:00:42.990106 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.005695 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-p29td" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.073470 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:43 crc kubenswrapper[4583]: E1209 15:00:43.073640 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.573624984 +0000 UTC m=+141.781074072 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.073696 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: E1209 15:00:43.074064 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.574049246 +0000 UTC m=+141.781498334 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.174591 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:43 crc kubenswrapper[4583]: E1209 15:00:43.174793 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.67474044 +0000 UTC m=+141.882189529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.174888 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: E1209 15:00:43.175141 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-09 15:00:43.67513134 +0000 UTC m=+141.882580428 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-csjlq" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.221054 4583 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-09T15:00:42.611954434Z","Handler":null,"Name":""} Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.223793 4583 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.223829 4583 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.275995 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.281509 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.377336 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.381010 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.381068 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.408262 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-csjlq\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.433320 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.434066 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.436664 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.438269 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.441058 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.468195 4583 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zzls8 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]log ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]etcd ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/generic-apiserver-start-informers ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/max-in-flight-filter ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 09 15:00:43 crc kubenswrapper[4583]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 09 15:00:43 crc kubenswrapper[4583]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/project.openshift.io-projectcache ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/openshift.io-startinformers ok Dec 09 15:00:43 crc kubenswrapper[4583]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 09 15:00:43 crc kubenswrapper[4583]: 
[+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 09 15:00:43 crc kubenswrapper[4583]: livez check failed Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.468240 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" podUID="6baa2305-cdf4-4911-b91c-e26ee8ab9f64" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.579436 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.579827 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.579931 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd6cn\" (UniqueName: \"kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.611930 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:00:43 crc kubenswrapper[4583]: W1209 15:00:43.618383 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6163f29_831f_435e_8410_9f08f394ca5f.slice/crio-acb431f1812c120b9eaf5eb6566263c9396c5ab05301413662e297fb92b160e7 WatchSource:0}: Error finding container acb431f1812c120b9eaf5eb6566263c9396c5ab05301413662e297fb92b160e7: Status 404 returned error can't find the container with id acb431f1812c120b9eaf5eb6566263c9396c5ab05301413662e297fb92b160e7 Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.628140 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.628902 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.630374 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.636439 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.682099 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.682145 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.682195 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd6cn\" (UniqueName: \"kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.682865 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.683072 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.698951 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd6cn\" (UniqueName: \"kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn\") pod \"community-operators-wpmv6\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.776635 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.783753 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcrj7\" (UniqueName: \"kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.784028 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.784076 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.797532 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:43 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:43 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:43 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.797774 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.826040 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.827066 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.847186 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" event={"ID":"f6163f29-831f-435e-8410-9f08f394ca5f","Type":"ContainerStarted","Data":"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4"} Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.847636 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" event={"ID":"f6163f29-831f-435e-8410-9f08f394ca5f","Type":"ContainerStarted","Data":"acb431f1812c120b9eaf5eb6566263c9396c5ab05301413662e297fb92b160e7"} Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.848195 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.848221 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.851382 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.876730 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" podStartSLOduration=121.876717678 podStartE2EDuration="2m1.876717678s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:43.874821268 +0000 UTC m=+142.082270357" watchObservedRunningTime="2025-12-09 15:00:43.876717678 +0000 UTC m=+142.084166765" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.884884 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcrj7\" (UniqueName: \"kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.884976 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.885017 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.885422 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.885978 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.904419 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcrj7\" (UniqueName: \"kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7\") pod \"certified-operators-92sgh\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.939877 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.940184 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:00:43 crc kubenswrapper[4583]: W1209 15:00:43.949383 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9154b98b_4bb4_4c44_ba5a_9ae277a083e4.slice/crio-bd1793d0e06ea5d914bbd7ecaa5d44c5a26193a9eee06d857e2b4708ee654209 WatchSource:0}: Error finding container bd1793d0e06ea5d914bbd7ecaa5d44c5a26193a9eee06d857e2b4708ee654209: Status 404 returned error can't find the container with id bd1793d0e06ea5d914bbd7ecaa5d44c5a26193a9eee06d857e2b4708ee654209 Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.988636 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6cld\" (UniqueName: \"kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.988739 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:43 crc kubenswrapper[4583]: I1209 15:00:43.991061 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.030781 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.032179 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.039738 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.085088 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.090150 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.093054 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6cld\" (UniqueName: \"kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.093092 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.093144 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.093956 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.094062 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.109184 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6cld\" (UniqueName: \"kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld\") pod \"community-operators-zt9gk\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.127003 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.143047 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:00:44 crc kubenswrapper[4583]: W1209 15:00:44.180171 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbaf822f8_4771_4b04_b412_f84f5a7bad3a.slice/crio-09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676 WatchSource:0}: Error finding container 09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676: Status 404 returned error can't find the container with id 09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.193637 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume\") pod \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.193721 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume\") pod \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.193776 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76rxq\" (UniqueName: \"kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq\") pod \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\" (UID: \"6fda0580-6dfb-41d7-b8a1-853b46605cfc\") " Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.194096 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.194123 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpmzs\" (UniqueName: \"kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.194162 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.194589 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume" (OuterVolumeSpecName: "config-volume") pod "6fda0580-6dfb-41d7-b8a1-853b46605cfc" (UID: "6fda0580-6dfb-41d7-b8a1-853b46605cfc"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.198648 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq" (OuterVolumeSpecName: "kube-api-access-76rxq") pod "6fda0580-6dfb-41d7-b8a1-853b46605cfc" (UID: "6fda0580-6dfb-41d7-b8a1-853b46605cfc"). InnerVolumeSpecName "kube-api-access-76rxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.199230 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6fda0580-6dfb-41d7-b8a1-853b46605cfc" (UID: "6fda0580-6dfb-41d7-b8a1-853b46605cfc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.275341 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:00:44 crc kubenswrapper[4583]: W1209 15:00:44.280339 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e760957_b628_4e38_8f70_804ae2d5da16.slice/crio-efe086b4b9a7dec858641c5b0b7a5670096c7499ad49d4b7157154b069341994 WatchSource:0}: Error finding container efe086b4b9a7dec858641c5b0b7a5670096c7499ad49d4b7157154b069341994: Status 404 returned error can't find the container with id efe086b4b9a7dec858641c5b0b7a5670096c7499ad49d4b7157154b069341994 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.294935 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.294975 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpmzs\" (UniqueName: \"kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295016 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295117 4583 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fda0580-6dfb-41d7-b8a1-853b46605cfc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295132 4583 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fda0580-6dfb-41d7-b8a1-853b46605cfc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295148 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76rxq\" (UniqueName: 
\"kubernetes.io/projected/6fda0580-6dfb-41d7-b8a1-853b46605cfc-kube-api-access-76rxq\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295346 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.295473 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.309440 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpmzs\" (UniqueName: \"kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs\") pod \"certified-operators-5v67z\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.348392 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.525452 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:00:44 crc kubenswrapper[4583]: W1209 15:00:44.532084 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea23a6e4_d930_4e64_a260_38aaeb8e4dc7.slice/crio-603868efa61d5a0dce429cc83075f587dbe358fa2c66023f235cf8a6e61b1212 WatchSource:0}: Error finding container 603868efa61d5a0dce429cc83075f587dbe358fa2c66023f235cf8a6e61b1212: Status 404 returned error can't find the container with id 603868efa61d5a0dce429cc83075f587dbe358fa2c66023f235cf8a6e61b1212 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.797811 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:44 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:44 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:44 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.797876 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.851398 4583 generic.go:334] "Generic (PLEG): container finished" podID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerID="d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266" exitCode=0 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.851454 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" 
event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerDied","Data":"d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.851479 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerStarted","Data":"603868efa61d5a0dce429cc83075f587dbe358fa2c66023f235cf8a6e61b1212"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.852840 4583 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.854191 4583 generic.go:334] "Generic (PLEG): container finished" podID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerID="43d01cc338ba81167f0721b9efb0a947353e406521c2c01474e81839992ab4cd" exitCode=0 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.854229 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerDied","Data":"43d01cc338ba81167f0721b9efb0a947353e406521c2c01474e81839992ab4cd"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.854245 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerStarted","Data":"09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.856121 4583 generic.go:334] "Generic (PLEG): container finished" podID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerID="2cf7f2a1ff512c04aa551945e043b09291bed9e0f31eaa8c97146b22636dc661" exitCode=0 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.856191 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerDied","Data":"2cf7f2a1ff512c04aa551945e043b09291bed9e0f31eaa8c97146b22636dc661"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.856219 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerStarted","Data":"bd1793d0e06ea5d914bbd7ecaa5d44c5a26193a9eee06d857e2b4708ee654209"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.857974 4583 generic.go:334] "Generic (PLEG): container finished" podID="4e760957-b628-4e38-8f70-804ae2d5da16" containerID="fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876" exitCode=0 Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.858086 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerDied","Data":"fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.858133 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerStarted","Data":"efe086b4b9a7dec858641c5b0b7a5670096c7499ad49d4b7157154b069341994"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.859802 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" 
event={"ID":"6fda0580-6dfb-41d7-b8a1-853b46605cfc","Type":"ContainerDied","Data":"7a66e511017cc11d27fd4532d134f64e80e9e80ccd17331ada8732ecc451fdf7"} Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.859841 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a66e511017cc11d27fd4532d134f64e80e9e80ccd17331ada8732ecc451fdf7" Dec 09 15:00:44 crc kubenswrapper[4583]: I1209 15:00:44.859918 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421540-fvs7m" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.629060 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:00:45 crc kubenswrapper[4583]: E1209 15:00:45.629242 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fda0580-6dfb-41d7-b8a1-853b46605cfc" containerName="collect-profiles" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.629258 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fda0580-6dfb-41d7-b8a1-853b46605cfc" containerName="collect-profiles" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.629355 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fda0580-6dfb-41d7-b8a1-853b46605cfc" containerName="collect-profiles" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.629959 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.631867 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.635623 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.716254 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.716426 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.716460 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc5hs\" (UniqueName: \"kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.796393 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:45 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 
15:00:45 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:45 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.796622 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.817984 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.818095 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.818128 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc5hs\" (UniqueName: \"kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.818387 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.818463 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.833593 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc5hs\" (UniqueName: \"kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs\") pod \"redhat-marketplace-fjmdc\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:45 crc kubenswrapper[4583]: I1209 15:00:45.945612 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.026507 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.030119 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.038414 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.121157 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.121207 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.121265 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw8xn\" (UniqueName: \"kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.225284 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw8xn\" (UniqueName: \"kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.230971 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.231126 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.231416 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.231649 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.239992 4583 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zw8xn\" (UniqueName: \"kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn\") pod \"redhat-marketplace-vgc4p\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.350257 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.365467 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:00:46 crc kubenswrapper[4583]: W1209 15:00:46.372885 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71b4c55d_00ff_430c_9a13_5a5ad88e8231.slice/crio-b99198f52d59806de7ac831c8570c309652a4667e18af9313e78707e9c4b1d8b WatchSource:0}: Error finding container b99198f52d59806de7ac831c8570c309652a4667e18af9313e78707e9c4b1d8b: Status 404 returned error can't find the container with id b99198f52d59806de7ac831c8570c309652a4667e18af9313e78707e9c4b1d8b Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.615477 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.618284 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.621359 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.623051 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.625288 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.637884 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.638819 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.640259 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.643264 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.726246 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:00:46 crc kubenswrapper[4583]: W1209 15:00:46.732359 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb38e7c7e_7c6b_464c_b50d_7b151bf404b5.slice/crio-c5cb258851e0b4139ae1117bc05872091d455a288b106bc581d9eded4989bd88 WatchSource:0}: Error finding container c5cb258851e0b4139ae1117bc05872091d455a288b106bc581d9eded4989bd88: Status 404 returned error can't find the container with id c5cb258851e0b4139ae1117bc05872091d455a288b106bc581d9eded4989bd88 Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.739342 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.739374 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hr4q\" (UniqueName: \"kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.739430 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.739463 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.739480 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.799809 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:46 crc 
kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:46 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:46 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.799853 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.840629 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.840682 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.840704 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.840785 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.840808 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hr4q\" (UniqueName: \"kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.841137 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.841187 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.841217 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " 
pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.858834 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hr4q\" (UniqueName: \"kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q\") pod \"redhat-operators-9rxmd\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.863237 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.874449 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerStarted","Data":"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d"} Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.874490 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerStarted","Data":"c5cb258851e0b4139ae1117bc05872091d455a288b106bc581d9eded4989bd88"} Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.876306 4583 generic.go:334] "Generic (PLEG): container finished" podID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerID="91b86a545ea0229900da80ef81fe5ac48e3ee6dfc2c7f3ac093eb9d98ca14828" exitCode=0 Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.876338 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerDied","Data":"91b86a545ea0229900da80ef81fe5ac48e3ee6dfc2c7f3ac093eb9d98ca14828"} Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.876359 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerStarted","Data":"b99198f52d59806de7ac831c8570c309652a4667e18af9313e78707e9c4b1d8b"} Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.949308 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:46 crc kubenswrapper[4583]: I1209 15:00:46.956953 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.025550 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.026441 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.041642 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.144848 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9886\" (UniqueName: \"kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.144887 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.144988 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.198235 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:00:47 crc kubenswrapper[4583]: W1209 15:00:47.223121 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c53c4b3_06a8_411e_9273_91b956969fb7.slice/crio-35b112174073274f9f8249ded1feb5961cdfb523c23aac0b7a4ad5eb3a5ef7c1 WatchSource:0}: Error finding container 35b112174073274f9f8249ded1feb5961cdfb523c23aac0b7a4ad5eb3a5ef7c1: Status 404 returned error can't find the container with id 35b112174073274f9f8249ded1feb5961cdfb523c23aac0b7a4ad5eb3a5ef7c1 Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.225146 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.246026 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.246073 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9886\" (UniqueName: \"kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.246093 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 
crc kubenswrapper[4583]: I1209 15:00:47.249130 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.249403 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.266106 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9886\" (UniqueName: \"kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886\") pod \"redhat-operators-jzs9b\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.345015 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.796314 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:47 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:47 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:47 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.796740 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.796943 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.804228 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.826673 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-qw8lk" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.891234 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerStarted","Data":"882c4bd312746d39f3069834345fc84f7fa3218b3df0cf668d9fa92da3c348d6"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.898297 4583 generic.go:334] "Generic (PLEG): container finished" podID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerID="1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d" exitCode=0 Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.898375 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" 
event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerDied","Data":"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.904950 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"169b4605-097f-4335-b814-f9f9845a2973","Type":"ContainerStarted","Data":"4df66a57415b9b1e2be4987280d0d9ecfdd040d5e4888a92bec51930c1ca0685"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.905039 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"169b4605-097f-4335-b814-f9f9845a2973","Type":"ContainerStarted","Data":"4c7df2785f78f6eaacf4b4e2e411fd4de92f44ab0fa6317745353ed85a6a7aee"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.908648 4583 generic.go:334] "Generic (PLEG): container finished" podID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerID="eb6dc8d24129e983b205860c8310e80e78fe213059cc6f3d446ce2fa21e2d115" exitCode=0 Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.908671 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerDied","Data":"eb6dc8d24129e983b205860c8310e80e78fe213059cc6f3d446ce2fa21e2d115"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.908684 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerStarted","Data":"35b112174073274f9f8249ded1feb5961cdfb523c23aac0b7a4ad5eb3a5ef7c1"} Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.930051 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.9300389839999998 podStartE2EDuration="1.930038984s" podCreationTimestamp="2025-12-09 15:00:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:00:47.929757871 +0000 UTC m=+146.137206960" watchObservedRunningTime="2025-12-09 15:00:47.930038984 +0000 UTC m=+146.137488072" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.955277 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.958578 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.993314 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:47 crc kubenswrapper[4583]: I1209 15:00:47.996620 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zzls8" Dec 09 15:00:48 crc 
kubenswrapper[4583]: I1209 15:00:48.058376 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.058458 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.058504 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.064861 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.075341 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.088100 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.190177 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.193575 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.199338 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.503733 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.504013 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.507679 4583 patch_prober.go:28] interesting pod/console-f9d7485db-n85fv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.507719 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-n85fv" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 09 15:00:48 crc kubenswrapper[4583]: W1209 15:00:48.691003 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-b60921a04b1813081c482ebac2c1c2d01f2bc493cc3f84ee3b4a2420ad60c76b WatchSource:0}: Error finding container b60921a04b1813081c482ebac2c1c2d01f2bc493cc3f84ee3b4a2420ad60c76b: Status 404 returned error can't find the container with id b60921a04b1813081c482ebac2c1c2d01f2bc493cc3f84ee3b4a2420ad60c76b Dec 09 15:00:48 crc kubenswrapper[4583]: W1209 15:00:48.727429 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-6015bbf1ba0f6637ed8efbc1d2d3ab6002b486ffd6c5bb6dcc3e226243c1775c WatchSource:0}: Error finding container 6015bbf1ba0f6637ed8efbc1d2d3ab6002b486ffd6c5bb6dcc3e226243c1775c: Status 404 returned error can't find the container with id 6015bbf1ba0f6637ed8efbc1d2d3ab6002b486ffd6c5bb6dcc3e226243c1775c Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.801577 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:48 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:48 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:48 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.801668 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.818009 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.818911 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.822667 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.826336 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.826858 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.913855 4583 generic.go:334] "Generic (PLEG): container finished" podID="169b4605-097f-4335-b814-f9f9845a2973" containerID="4df66a57415b9b1e2be4987280d0d9ecfdd040d5e4888a92bec51930c1ca0685" exitCode=0 Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.914040 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"169b4605-097f-4335-b814-f9f9845a2973","Type":"ContainerDied","Data":"4df66a57415b9b1e2be4987280d0d9ecfdd040d5e4888a92bec51930c1ca0685"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.915947 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2877ef26c5c996124e7b78a229977f3d86f9cb651c7385e98f80b935323079a7"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.915973 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b60921a04b1813081c482ebac2c1c2d01f2bc493cc3f84ee3b4a2420ad60c76b"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.917636 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d36b51744a521ffce84f67e9cc03cb55211944e4162763a8b3e5032f7b2a03ce"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.917663 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"6015bbf1ba0f6637ed8efbc1d2d3ab6002b486ffd6c5bb6dcc3e226243c1775c"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.919147 4583 generic.go:334] "Generic (PLEG): container finished" podID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerID="204eaf4392e0d923787204329a1bd64d0cdb4cfcae0fee5a6c2cb21ced89c780" exitCode=0 Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.919849 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerDied","Data":"204eaf4392e0d923787204329a1bd64d0cdb4cfcae0fee5a6c2cb21ced89c780"} Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.976149 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:48 crc kubenswrapper[4583]: I1209 15:00:48.976359 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.077309 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.077494 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.078383 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.092680 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.160113 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.796572 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:49 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:49 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:49 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:49 crc kubenswrapper[4583]: I1209 15:00:49.796840 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:50 crc kubenswrapper[4583]: I1209 15:00:50.795497 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:50 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:50 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:50 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:50 crc kubenswrapper[4583]: I1209 15:00:50.795549 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:51 crc kubenswrapper[4583]: I1209 15:00:51.795661 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:51 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:51 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:51 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:51 crc kubenswrapper[4583]: I1209 15:00:51.795708 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.642592 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.723389 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir\") pod \"169b4605-097f-4335-b814-f9f9845a2973\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.723479 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "169b4605-097f-4335-b814-f9f9845a2973" (UID: "169b4605-097f-4335-b814-f9f9845a2973"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.723658 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access\") pod \"169b4605-097f-4335-b814-f9f9845a2973\" (UID: \"169b4605-097f-4335-b814-f9f9845a2973\") " Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.723923 4583 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/169b4605-097f-4335-b814-f9f9845a2973-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.730894 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "169b4605-097f-4335-b814-f9f9845a2973" (UID: "169b4605-097f-4335-b814-f9f9845a2973"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.795792 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:52 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:52 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:52 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.795851 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.825179 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/169b4605-097f-4335-b814-f9f9845a2973-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.938171 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.950836 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8815c81b72860a2ae0a66b313f450accad347f6b0003329eafa7cd86e2804ac8"} Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.950865 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5d008d15533d1d7662dc7af24895ab8bae1849f187f2618a3e5c92cb30324523"} Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.951132 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.953278 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"169b4605-097f-4335-b814-f9f9845a2973","Type":"ContainerDied","Data":"4c7df2785f78f6eaacf4b4e2e411fd4de92f44ab0fa6317745353ed85a6a7aee"} Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.953303 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c7df2785f78f6eaacf4b4e2e411fd4de92f44ab0fa6317745353ed85a6a7aee" Dec 09 15:00:52 crc kubenswrapper[4583]: I1209 15:00:52.953373 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.795499 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:53 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:53 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:53 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.795702 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.857025 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-bsppj" Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.960030 4583 generic.go:334] "Generic (PLEG): container finished" podID="b223a317-7a69-4b6a-83e7-2cf7436858d5" containerID="b2782adc47d4e6e78dd145255b000942dad7868d10d4395d071a9173e90a47bc" exitCode=0 Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.960786 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b223a317-7a69-4b6a-83e7-2cf7436858d5","Type":"ContainerDied","Data":"b2782adc47d4e6e78dd145255b000942dad7868d10d4395d071a9173e90a47bc"} Dec 09 15:00:53 crc kubenswrapper[4583]: I1209 15:00:53.960814 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b223a317-7a69-4b6a-83e7-2cf7436858d5","Type":"ContainerStarted","Data":"96c4e96e4915208994d29082734b400cb55341e6c2da178ac194804f4623bf48"} Dec 09 15:00:54 crc kubenswrapper[4583]: I1209 15:00:54.794721 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:54 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:54 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:54 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:54 crc kubenswrapper[4583]: I1209 15:00:54.794801 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.127591 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.253891 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access\") pod \"b223a317-7a69-4b6a-83e7-2cf7436858d5\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.253938 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir\") pod \"b223a317-7a69-4b6a-83e7-2cf7436858d5\" (UID: \"b223a317-7a69-4b6a-83e7-2cf7436858d5\") " Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.254125 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b223a317-7a69-4b6a-83e7-2cf7436858d5" (UID: "b223a317-7a69-4b6a-83e7-2cf7436858d5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.254301 4583 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b223a317-7a69-4b6a-83e7-2cf7436858d5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.259304 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b223a317-7a69-4b6a-83e7-2cf7436858d5" (UID: "b223a317-7a69-4b6a-83e7-2cf7436858d5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.355846 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b223a317-7a69-4b6a-83e7-2cf7436858d5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.796048 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:55 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:55 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:55 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.796425 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.972254 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b223a317-7a69-4b6a-83e7-2cf7436858d5","Type":"ContainerDied","Data":"96c4e96e4915208994d29082734b400cb55341e6c2da178ac194804f4623bf48"} Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.972353 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96c4e96e4915208994d29082734b400cb55341e6c2da178ac194804f4623bf48" Dec 09 15:00:55 crc kubenswrapper[4583]: I1209 15:00:55.972459 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 09 15:00:56 crc kubenswrapper[4583]: I1209 15:00:56.794978 4583 patch_prober.go:28] interesting pod/router-default-5444994796-lhdvm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 09 15:00:56 crc kubenswrapper[4583]: [-]has-synced failed: reason withheld Dec 09 15:00:56 crc kubenswrapper[4583]: [+]process-running ok Dec 09 15:00:56 crc kubenswrapper[4583]: healthz check failed Dec 09 15:00:56 crc kubenswrapper[4583]: I1209 15:00:56.795031 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lhdvm" podUID="68cd346a-3a57-473b-8a06-9e3823688014" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 09 15:00:57 crc kubenswrapper[4583]: I1209 15:00:57.796659 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:57 crc kubenswrapper[4583]: I1209 15:00:57.799613 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-lhdvm" Dec 09 15:00:58 crc kubenswrapper[4583]: I1209 15:00:58.504029 4583 patch_prober.go:28] interesting pod/console-f9d7485db-n85fv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 09 15:00:58 crc kubenswrapper[4583]: I1209 15:00:58.504264 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-n85fv" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 09 15:00:59 crc kubenswrapper[4583]: I1209 15:00:59.391953 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:00:59 crc kubenswrapper[4583]: I1209 15:00:59.392008 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:01:03 crc kubenswrapper[4583]: I1209 15:01:03.448586 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:01:04 crc kubenswrapper[4583]: I1209 15:01:04.175373 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:01:04 crc kubenswrapper[4583]: I1209 15:01:04.183338 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/b5848176-adf1-47d0-b043-1b305357d808-metrics-certs\") pod \"network-metrics-daemon-9j9s6\" (UID: \"b5848176-adf1-47d0-b043-1b305357d808\") " pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:01:04 crc kubenswrapper[4583]: I1209 15:01:04.404111 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9j9s6" Dec 09 15:01:08 crc kubenswrapper[4583]: I1209 15:01:08.507054 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:01:08 crc kubenswrapper[4583]: I1209 15:01:08.510313 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.041142 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerStarted","Data":"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.045912 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerStarted","Data":"3760c142223acd98eabfde24eaf3c127a8499b3d2e578f1badf9fcf74ef29780"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.048023 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerStarted","Data":"aa11777b7d0ce523bca025ed7094887326d10c3736715c04a64ead10b53bae3e"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.050877 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerStarted","Data":"cdba763bcf61fe9c876c522fba8be2bccc4a25171ef6029b1c1d0a1a1ce06c65"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.052227 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerStarted","Data":"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.057653 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerStarted","Data":"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d"} Dec 09 15:01:09 crc kubenswrapper[4583]: I1209 15:01:09.194296 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9j9s6"] Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.061532 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" event={"ID":"b5848176-adf1-47d0-b043-1b305357d808","Type":"ContainerStarted","Data":"85a6aa3b61f488b5e2b6f4707a2fb7351a083ebb941e0993f96d9a163e5d889b"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.061734 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" event={"ID":"b5848176-adf1-47d0-b043-1b305357d808","Type":"ContainerStarted","Data":"6cd5642fdab833da6e25210e6a16c7940520bb160496a584e9451495ba416655"} Dec 09 15:01:10 crc 
kubenswrapper[4583]: I1209 15:01:10.061774 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9j9s6" event={"ID":"b5848176-adf1-47d0-b043-1b305357d808","Type":"ContainerStarted","Data":"2d678df5156fd088e8db6e02873b0f5c98abcefcf6c278d0c6ca93a92b54a7fa"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.064403 4583 generic.go:334] "Generic (PLEG): container finished" podID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerID="cdba763bcf61fe9c876c522fba8be2bccc4a25171ef6029b1c1d0a1a1ce06c65" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.064471 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerDied","Data":"cdba763bcf61fe9c876c522fba8be2bccc4a25171ef6029b1c1d0a1a1ce06c65"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.066080 4583 generic.go:334] "Generic (PLEG): container finished" podID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerID="639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.066127 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerDied","Data":"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.068879 4583 generic.go:334] "Generic (PLEG): container finished" podID="4e760957-b628-4e38-8f70-804ae2d5da16" containerID="46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.068956 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerDied","Data":"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.071827 4583 generic.go:334] "Generic (PLEG): container finished" podID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerID="5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.071899 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerDied","Data":"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.074773 4583 generic.go:334] "Generic (PLEG): container finished" podID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerID="3760c142223acd98eabfde24eaf3c127a8499b3d2e578f1badf9fcf74ef29780" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.075500 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerDied","Data":"3760c142223acd98eabfde24eaf3c127a8499b3d2e578f1badf9fcf74ef29780"} Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.079312 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-9j9s6" podStartSLOduration=148.079303979 podStartE2EDuration="2m28.079303979s" podCreationTimestamp="2025-12-09 14:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:01:10.078376164 +0000 UTC m=+168.285825252" watchObservedRunningTime="2025-12-09 15:01:10.079303979 +0000 UTC m=+168.286753067" Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.090380 4583 generic.go:334] "Generic (PLEG): container finished" podID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerID="aa11777b7d0ce523bca025ed7094887326d10c3736715c04a64ead10b53bae3e" exitCode=0 Dec 09 15:01:10 crc kubenswrapper[4583]: I1209 15:01:10.092244 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerDied","Data":"aa11777b7d0ce523bca025ed7094887326d10c3736715c04a64ead10b53bae3e"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.097204 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerStarted","Data":"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.099230 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerStarted","Data":"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.101200 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerStarted","Data":"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.102854 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerStarted","Data":"7ebb06978b71f4541a39226f7e824946bfe334d0794a6205d0585be7249990c8"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.105110 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerStarted","Data":"c3ebc45d1291e85361c66fb94ab746ac156bd9e8fc6eb1f0d6cdf3daba95325c"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.106774 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerStarted","Data":"3a0a476ec5af82d678050a743d7bc094001bdfc66b29cf8c08a4afdb5e130d0e"} Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.122524 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vgc4p" podStartSLOduration=2.303108745 podStartE2EDuration="25.12251188s" podCreationTimestamp="2025-12-09 15:00:46 +0000 UTC" firstStartedPulling="2025-12-09 15:00:47.90301108 +0000 UTC m=+146.110460168" lastFinishedPulling="2025-12-09 15:01:10.722414215 +0000 UTC m=+168.929863303" observedRunningTime="2025-12-09 15:01:11.118816626 +0000 UTC m=+169.326265714" watchObservedRunningTime="2025-12-09 15:01:11.12251188 +0000 UTC m=+169.329960968" Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.149044 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zt9gk" 
podStartSLOduration=2.330751312 podStartE2EDuration="28.149032472s" podCreationTimestamp="2025-12-09 15:00:43 +0000 UTC" firstStartedPulling="2025-12-09 15:00:44.858935859 +0000 UTC m=+143.066384947" lastFinishedPulling="2025-12-09 15:01:10.677217029 +0000 UTC m=+168.884666107" observedRunningTime="2025-12-09 15:01:11.148998788 +0000 UTC m=+169.356447876" watchObservedRunningTime="2025-12-09 15:01:11.149032472 +0000 UTC m=+169.356481560" Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.149344 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5v67z" podStartSLOduration=1.4755475040000001 podStartE2EDuration="27.149339514s" podCreationTimestamp="2025-12-09 15:00:44 +0000 UTC" firstStartedPulling="2025-12-09 15:00:44.852596448 +0000 UTC m=+143.060045536" lastFinishedPulling="2025-12-09 15:01:10.526388468 +0000 UTC m=+168.733837546" observedRunningTime="2025-12-09 15:01:11.135934308 +0000 UTC m=+169.343383397" watchObservedRunningTime="2025-12-09 15:01:11.149339514 +0000 UTC m=+169.356788602" Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.166584 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fjmdc" podStartSLOduration=2.470344725 podStartE2EDuration="26.166566734s" podCreationTimestamp="2025-12-09 15:00:45 +0000 UTC" firstStartedPulling="2025-12-09 15:00:46.877271176 +0000 UTC m=+145.084720264" lastFinishedPulling="2025-12-09 15:01:10.573493185 +0000 UTC m=+168.780942273" observedRunningTime="2025-12-09 15:01:11.163575842 +0000 UTC m=+169.371024930" watchObservedRunningTime="2025-12-09 15:01:11.166566734 +0000 UTC m=+169.374015823" Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.188295 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jzs9b" podStartSLOduration=5.522997064 podStartE2EDuration="24.188286338s" podCreationTimestamp="2025-12-09 15:00:47 +0000 UTC" firstStartedPulling="2025-12-09 15:00:52.050238627 +0000 UTC m=+150.257687714" lastFinishedPulling="2025-12-09 15:01:10.7155279 +0000 UTC m=+168.922976988" observedRunningTime="2025-12-09 15:01:11.188278964 +0000 UTC m=+169.395728052" watchObservedRunningTime="2025-12-09 15:01:11.188286338 +0000 UTC m=+169.395735426" Dec 09 15:01:11 crc kubenswrapper[4583]: I1209 15:01:11.203615 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-92sgh" podStartSLOduration=2.429634121 podStartE2EDuration="28.203600737s" podCreationTimestamp="2025-12-09 15:00:43 +0000 UTC" firstStartedPulling="2025-12-09 15:00:44.85507022 +0000 UTC m=+143.062519309" lastFinishedPulling="2025-12-09 15:01:10.629036837 +0000 UTC m=+168.836485925" observedRunningTime="2025-12-09 15:01:11.201437092 +0000 UTC m=+169.408886179" watchObservedRunningTime="2025-12-09 15:01:11.203600737 +0000 UTC m=+169.411049825" Dec 09 15:01:13 crc kubenswrapper[4583]: I1209 15:01:13.941026 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:01:13 crc kubenswrapper[4583]: I1209 15:01:13.941231 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.029375 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 
15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.143972 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.144689 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.176871 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.349397 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.349950 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:14 crc kubenswrapper[4583]: I1209 15:01:14.375904 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:15 crc kubenswrapper[4583]: I1209 15:01:15.149376 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:15 crc kubenswrapper[4583]: I1209 15:01:15.150890 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:15 crc kubenswrapper[4583]: I1209 15:01:15.946050 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:01:15 crc kubenswrapper[4583]: I1209 15:01:15.946233 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:01:15 crc kubenswrapper[4583]: I1209 15:01:15.971608 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.126367 4583 generic.go:334] "Generic (PLEG): container finished" podID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerID="ca2e52e4d83a64f59cde887bc204b883c2891e398a6c7dad199854a02c7a1b9f" exitCode=0 Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.126406 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerDied","Data":"ca2e52e4d83a64f59cde887bc204b883c2891e398a6c7dad199854a02c7a1b9f"} Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.153319 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.352106 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.352139 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:16 crc kubenswrapper[4583]: I1209 15:01:16.380416 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.132954 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerStarted","Data":"238104d3cd5d5e80e46f5665e346726f75affe4473cdb7bed58e1bc157301ec2"} Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.151252 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9rxmd" podStartSLOduration=2.406090955 podStartE2EDuration="31.151236836s" podCreationTimestamp="2025-12-09 15:00:46 +0000 UTC" firstStartedPulling="2025-12-09 15:00:47.91048554 +0000 UTC m=+146.117934628" lastFinishedPulling="2025-12-09 15:01:16.65563142 +0000 UTC m=+174.863080509" observedRunningTime="2025-12-09 15:01:17.148600577 +0000 UTC m=+175.356049664" watchObservedRunningTime="2025-12-09 15:01:17.151236836 +0000 UTC m=+175.358685924" Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.161510 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.346223 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.346262 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:17 crc kubenswrapper[4583]: I1209 15:01:17.373037 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:18 crc kubenswrapper[4583]: I1209 15:01:18.137941 4583 generic.go:334] "Generic (PLEG): container finished" podID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerID="e9d530425ba82ad1ef934b90f7a5610d2dc0f665ae64c2fdcaab3bf436b92b72" exitCode=0 Dec 09 15:01:18 crc kubenswrapper[4583]: I1209 15:01:18.137974 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerDied","Data":"e9d530425ba82ad1ef934b90f7a5610d2dc0f665ae64c2fdcaab3bf436b92b72"} Dec 09 15:01:18 crc kubenswrapper[4583]: I1209 15:01:18.165017 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:18 crc kubenswrapper[4583]: I1209 15:01:18.512627 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qdzns" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.143422 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerStarted","Data":"bbfd10fb1ec1fd940fe2ede82c64c663b7df1a4dd5eb72b9e0d53a42db2fcb9b"} Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.157627 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wpmv6" podStartSLOduration=2.362204831 podStartE2EDuration="36.157618908s" podCreationTimestamp="2025-12-09 15:00:43 +0000 UTC" firstStartedPulling="2025-12-09 15:00:44.858131425 +0000 UTC m=+143.065580513" lastFinishedPulling="2025-12-09 15:01:18.653545502 +0000 UTC m=+176.860994590" observedRunningTime="2025-12-09 15:01:19.154863081 +0000 UTC m=+177.362312169" watchObservedRunningTime="2025-12-09 15:01:19.157618908 +0000 UTC m=+177.365067996" Dec 09 15:01:19 crc 
kubenswrapper[4583]: I1209 15:01:19.268861 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.269034 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5v67z" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="registry-server" containerID="cri-o://75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656" gracePeriod=2 Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.468508 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.468700 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zt9gk" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="registry-server" containerID="cri-o://6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27" gracePeriod=2 Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.650526 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.767892 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content\") pod \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.768127 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpmzs\" (UniqueName: \"kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs\") pod \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.768149 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities\") pod \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\" (UID: \"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.769378 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities" (OuterVolumeSpecName: "utilities") pod "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" (UID: "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.772615 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs" (OuterVolumeSpecName: "kube-api-access-vpmzs") pod "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" (UID: "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7"). InnerVolumeSpecName "kube-api-access-vpmzs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.806135 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" (UID: "ea23a6e4-d930-4e64-a260-38aaeb8e4dc7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.854611 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.870773 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpmzs\" (UniqueName: \"kubernetes.io/projected/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-kube-api-access-vpmzs\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.870814 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.870825 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.972000 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6cld\" (UniqueName: \"kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld\") pod \"4e760957-b628-4e38-8f70-804ae2d5da16\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.972036 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content\") pod \"4e760957-b628-4e38-8f70-804ae2d5da16\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.972084 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities\") pod \"4e760957-b628-4e38-8f70-804ae2d5da16\" (UID: \"4e760957-b628-4e38-8f70-804ae2d5da16\") " Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.972725 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities" (OuterVolumeSpecName: "utilities") pod "4e760957-b628-4e38-8f70-804ae2d5da16" (UID: "4e760957-b628-4e38-8f70-804ae2d5da16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:19 crc kubenswrapper[4583]: I1209 15:01:19.974236 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld" (OuterVolumeSpecName: "kube-api-access-r6cld") pod "4e760957-b628-4e38-8f70-804ae2d5da16" (UID: "4e760957-b628-4e38-8f70-804ae2d5da16"). InnerVolumeSpecName "kube-api-access-r6cld". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.008780 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e760957-b628-4e38-8f70-804ae2d5da16" (UID: "4e760957-b628-4e38-8f70-804ae2d5da16"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.073568 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6cld\" (UniqueName: \"kubernetes.io/projected/4e760957-b628-4e38-8f70-804ae2d5da16-kube-api-access-r6cld\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.073600 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.073609 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e760957-b628-4e38-8f70-804ae2d5da16-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.150032 4583 generic.go:334] "Generic (PLEG): container finished" podID="4e760957-b628-4e38-8f70-804ae2d5da16" containerID="6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27" exitCode=0 Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.150088 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerDied","Data":"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27"} Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.150113 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zt9gk" event={"ID":"4e760957-b628-4e38-8f70-804ae2d5da16","Type":"ContainerDied","Data":"efe086b4b9a7dec858641c5b0b7a5670096c7499ad49d4b7157154b069341994"} Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.150128 4583 scope.go:117] "RemoveContainer" containerID="6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.150228 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zt9gk" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.154607 4583 generic.go:334] "Generic (PLEG): container finished" podID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerID="75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656" exitCode=0 Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.155174 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5v67z" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.155555 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerDied","Data":"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656"} Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.155572 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5v67z" event={"ID":"ea23a6e4-d930-4e64-a260-38aaeb8e4dc7","Type":"ContainerDied","Data":"603868efa61d5a0dce429cc83075f587dbe358fa2c66023f235cf8a6e61b1212"} Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.162505 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.166154 4583 scope.go:117] "RemoveContainer" containerID="46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.171637 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zt9gk"] Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.174404 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.176558 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5v67z"] Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.192907 4583 scope.go:117] "RemoveContainer" containerID="fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.202861 4583 scope.go:117] "RemoveContainer" containerID="6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.203110 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27\": container with ID starting with 6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27 not found: ID does not exist" containerID="6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.203138 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27"} err="failed to get container status \"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27\": rpc error: code = NotFound desc = could not find container \"6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27\": container with ID starting with 6b5072d674687b4a9270f7a11d78aae7f60dc12d1ae31fea9da6a993bd6b7e27 not found: ID does not exist" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.203168 4583 scope.go:117] "RemoveContainer" containerID="46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.204017 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d\": container with ID starting with 46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d not found: 
ID does not exist" containerID="46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.204037 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d"} err="failed to get container status \"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d\": rpc error: code = NotFound desc = could not find container \"46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d\": container with ID starting with 46b73ccd4380c0cba4e55a569a8f3f262c1276da0a8f2977d4a3e159c655259d not found: ID does not exist" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.204050 4583 scope.go:117] "RemoveContainer" containerID="fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.204297 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876\": container with ID starting with fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876 not found: ID does not exist" containerID="fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.204327 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876"} err="failed to get container status \"fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876\": rpc error: code = NotFound desc = could not find container \"fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876\": container with ID starting with fb750e9b1ba501de5ce31552e8f7d0ca4f5dec0489a2cf2733b9cb430b63b876 not found: ID does not exist" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.204346 4583 scope.go:117] "RemoveContainer" containerID="75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.216853 4583 scope.go:117] "RemoveContainer" containerID="5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.228896 4583 scope.go:117] "RemoveContainer" containerID="d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.238374 4583 scope.go:117] "RemoveContainer" containerID="75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.238648 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656\": container with ID starting with 75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656 not found: ID does not exist" containerID="75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.238722 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656"} err="failed to get container status \"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656\": rpc error: code = NotFound desc = could not find container 
\"75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656\": container with ID starting with 75c5966d2c76867ee43a04fe804316e0972200b1dc177ecdb790013887362656 not found: ID does not exist" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.238808 4583 scope.go:117] "RemoveContainer" containerID="5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.239068 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4\": container with ID starting with 5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4 not found: ID does not exist" containerID="5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.239099 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4"} err="failed to get container status \"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4\": rpc error: code = NotFound desc = could not find container \"5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4\": container with ID starting with 5485be87f286e102ee0648fdc744342faa177bf2f05ac3ae8d4bdce07bb4c2c4 not found: ID does not exist" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.239118 4583 scope.go:117] "RemoveContainer" containerID="d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266" Dec 09 15:01:20 crc kubenswrapper[4583]: E1209 15:01:20.239410 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266\": container with ID starting with d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266 not found: ID does not exist" containerID="d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266" Dec 09 15:01:20 crc kubenswrapper[4583]: I1209 15:01:20.239441 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266"} err="failed to get container status \"d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266\": rpc error: code = NotFound desc = could not find container \"d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266\": container with ID starting with d1df715ae1c75f6d18b86b7be28544893109a7da9e2ce0fea4e434ba49edc266 not found: ID does not exist" Dec 09 15:01:21 crc kubenswrapper[4583]: I1209 15:01:21.668439 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:01:21 crc kubenswrapper[4583]: I1209 15:01:21.668624 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vgc4p" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="registry-server" containerID="cri-o://789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694" gracePeriod=2 Dec 09 15:01:21 crc kubenswrapper[4583]: I1209 15:01:21.869885 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:01:21 crc kubenswrapper[4583]: I1209 15:01:21.870067 4583 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-jzs9b" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="registry-server" containerID="cri-o://3a0a476ec5af82d678050a743d7bc094001bdfc66b29cf8c08a4afdb5e130d0e" gracePeriod=2 Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.034913 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.085564 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" path="/var/lib/kubelet/pods/4e760957-b628-4e38-8f70-804ae2d5da16/volumes" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.086385 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" path="/var/lib/kubelet/pods/ea23a6e4-d930-4e64-a260-38aaeb8e4dc7/volumes" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.093330 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities\") pod \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.093370 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content\") pod \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.093428 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw8xn\" (UniqueName: \"kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn\") pod \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\" (UID: \"b38e7c7e-7c6b-464c-b50d-7b151bf404b5\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.094173 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities" (OuterVolumeSpecName: "utilities") pod "b38e7c7e-7c6b-464c-b50d-7b151bf404b5" (UID: "b38e7c7e-7c6b-464c-b50d-7b151bf404b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.096152 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn" (OuterVolumeSpecName: "kube-api-access-zw8xn") pod "b38e7c7e-7c6b-464c-b50d-7b151bf404b5" (UID: "b38e7c7e-7c6b-464c-b50d-7b151bf404b5"). InnerVolumeSpecName "kube-api-access-zw8xn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.109706 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b38e7c7e-7c6b-464c-b50d-7b151bf404b5" (UID: "b38e7c7e-7c6b-464c-b50d-7b151bf404b5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.166037 4583 generic.go:334] "Generic (PLEG): container finished" podID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerID="789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694" exitCode=0 Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.166092 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vgc4p" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.166107 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerDied","Data":"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694"} Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.166326 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vgc4p" event={"ID":"b38e7c7e-7c6b-464c-b50d-7b151bf404b5","Type":"ContainerDied","Data":"c5cb258851e0b4139ae1117bc05872091d455a288b106bc581d9eded4989bd88"} Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.166344 4583 scope.go:117] "RemoveContainer" containerID="789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.170925 4583 generic.go:334] "Generic (PLEG): container finished" podID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerID="3a0a476ec5af82d678050a743d7bc094001bdfc66b29cf8c08a4afdb5e130d0e" exitCode=0 Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.170949 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerDied","Data":"3a0a476ec5af82d678050a743d7bc094001bdfc66b29cf8c08a4afdb5e130d0e"} Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.185143 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.185543 4583 scope.go:117] "RemoveContainer" containerID="639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.187664 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vgc4p"] Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.194302 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.194326 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.194337 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw8xn\" (UniqueName: \"kubernetes.io/projected/b38e7c7e-7c6b-464c-b50d-7b151bf404b5-kube-api-access-zw8xn\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.218141 4583 scope.go:117] "RemoveContainer" containerID="1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.228447 4583 scope.go:117] "RemoveContainer" 
containerID="789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694" Dec 09 15:01:22 crc kubenswrapper[4583]: E1209 15:01:22.228703 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694\": container with ID starting with 789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694 not found: ID does not exist" containerID="789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.228733 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694"} err="failed to get container status \"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694\": rpc error: code = NotFound desc = could not find container \"789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694\": container with ID starting with 789730b99121d61ceec31e7e6296b1fbe31a54e9276712e6c0baad75020ee694 not found: ID does not exist" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.228770 4583 scope.go:117] "RemoveContainer" containerID="639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd" Dec 09 15:01:22 crc kubenswrapper[4583]: E1209 15:01:22.229017 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd\": container with ID starting with 639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd not found: ID does not exist" containerID="639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.229046 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd"} err="failed to get container status \"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd\": rpc error: code = NotFound desc = could not find container \"639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd\": container with ID starting with 639277f2158fdfb4c53c5a72a1373749548f46c8755b89cb1e4770012e3933cd not found: ID does not exist" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.229086 4583 scope.go:117] "RemoveContainer" containerID="1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d" Dec 09 15:01:22 crc kubenswrapper[4583]: E1209 15:01:22.229666 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d\": container with ID starting with 1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d not found: ID does not exist" containerID="1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.229690 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d"} err="failed to get container status \"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d\": rpc error: code = NotFound desc = could not find container \"1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d\": container with ID starting with 
1aa160265da60827f508fb2325d79ce26a31a356fcfc16781556effb45f95d0d not found: ID does not exist" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.235515 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.396609 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9886\" (UniqueName: \"kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886\") pod \"a3091d34-d53f-4233-986c-dd854e1a00e7\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.396649 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities\") pod \"a3091d34-d53f-4233-986c-dd854e1a00e7\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.396668 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content\") pod \"a3091d34-d53f-4233-986c-dd854e1a00e7\" (UID: \"a3091d34-d53f-4233-986c-dd854e1a00e7\") " Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.397419 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities" (OuterVolumeSpecName: "utilities") pod "a3091d34-d53f-4233-986c-dd854e1a00e7" (UID: "a3091d34-d53f-4233-986c-dd854e1a00e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.400676 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886" (OuterVolumeSpecName: "kube-api-access-t9886") pod "a3091d34-d53f-4233-986c-dd854e1a00e7" (UID: "a3091d34-d53f-4233-986c-dd854e1a00e7"). InnerVolumeSpecName "kube-api-access-t9886". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.479887 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3091d34-d53f-4233-986c-dd854e1a00e7" (UID: "a3091d34-d53f-4233-986c-dd854e1a00e7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.498051 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9886\" (UniqueName: \"kubernetes.io/projected/a3091d34-d53f-4233-986c-dd854e1a00e7-kube-api-access-t9886\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.498091 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:22 crc kubenswrapper[4583]: I1209 15:01:22.498100 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3091d34-d53f-4233-986c-dd854e1a00e7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016027 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016407 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016420 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016429 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016434 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016440 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016446 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016455 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016460 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016467 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016472 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016481 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016487 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016494 4583 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="169b4605-097f-4335-b814-f9f9845a2973" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016499 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="169b4605-097f-4335-b814-f9f9845a2973" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016506 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016511 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016518 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016522 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="extract-utilities" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016528 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b223a317-7a69-4b6a-83e7-2cf7436858d5" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016533 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="b223a317-7a69-4b6a-83e7-2cf7436858d5" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016540 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016546 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016553 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016558 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016565 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016570 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: E1209 15:01:23.016577 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016581 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="extract-content" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016671 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea23a6e4-d930-4e64-a260-38aaeb8e4dc7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016679 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016686 4583 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b223a317-7a69-4b6a-83e7-2cf7436858d5" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016694 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e760957-b628-4e38-8f70-804ae2d5da16" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016701 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="169b4605-097f-4335-b814-f9f9845a2973" containerName="pruner" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.016707 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" containerName="registry-server" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.017061 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.018932 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.019304 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.020479 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.176594 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jzs9b" event={"ID":"a3091d34-d53f-4233-986c-dd854e1a00e7","Type":"ContainerDied","Data":"882c4bd312746d39f3069834345fc84f7fa3218b3df0cf668d9fa92da3c348d6"} Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.176609 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jzs9b" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.176642 4583 scope.go:117] "RemoveContainer" containerID="3a0a476ec5af82d678050a743d7bc094001bdfc66b29cf8c08a4afdb5e130d0e" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.188306 4583 scope.go:117] "RemoveContainer" containerID="cdba763bcf61fe9c876c522fba8be2bccc4a25171ef6029b1c1d0a1a1ce06c65" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.201410 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.201454 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jzs9b"] Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.203025 4583 scope.go:117] "RemoveContainer" containerID="204eaf4392e0d923787204329a1bd64d0cdb4cfcae0fee5a6c2cb21ced89c780" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.205481 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.205593 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.306245 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.306307 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.306378 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.319592 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.330323 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.655938 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 09 15:01:23 crc kubenswrapper[4583]: W1209 15:01:23.659236 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod66e30bb3_a829_4470_b8cf_7aaad1e2e472.slice/crio-9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80 WatchSource:0}: Error finding container 9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80: Status 404 returned error can't find the container with id 9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80 Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.777657 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.777700 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.804841 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:01:23 crc kubenswrapper[4583]: I1209 15:01:23.965768 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.086988 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3091d34-d53f-4233-986c-dd854e1a00e7" path="/var/lib/kubelet/pods/a3091d34-d53f-4233-986c-dd854e1a00e7/volumes" Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.087532 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b38e7c7e-7c6b-464c-b50d-7b151bf404b5" path="/var/lib/kubelet/pods/b38e7c7e-7c6b-464c-b50d-7b151bf404b5/volumes" Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.182579 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"66e30bb3-a829-4470-b8cf-7aaad1e2e472","Type":"ContainerStarted","Data":"994a0076d75818021b48863d1f1649a7d97130e4b9db5f216e547a839ab4464e"} Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.182616 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"66e30bb3-a829-4470-b8cf-7aaad1e2e472","Type":"ContainerStarted","Data":"9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80"} Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.192366 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.192354532 podStartE2EDuration="1.192354532s" podCreationTimestamp="2025-12-09 15:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:01:24.190830187 +0000 UTC m=+182.398279275" watchObservedRunningTime="2025-12-09 15:01:24.192354532 +0000 UTC m=+182.399803620" Dec 09 15:01:24 crc kubenswrapper[4583]: I1209 15:01:24.213827 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:01:25 crc kubenswrapper[4583]: I1209 15:01:25.189163 4583 generic.go:334] "Generic (PLEG): container finished" 
podID="66e30bb3-a829-4470-b8cf-7aaad1e2e472" containerID="994a0076d75818021b48863d1f1649a7d97130e4b9db5f216e547a839ab4464e" exitCode=0 Dec 09 15:01:25 crc kubenswrapper[4583]: I1209 15:01:25.189207 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"66e30bb3-a829-4470-b8cf-7aaad1e2e472","Type":"ContainerDied","Data":"994a0076d75818021b48863d1f1649a7d97130e4b9db5f216e547a839ab4464e"} Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.426268 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.533709 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir\") pod \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.533816 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "66e30bb3-a829-4470-b8cf-7aaad1e2e472" (UID: "66e30bb3-a829-4470-b8cf-7aaad1e2e472"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.533844 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access\") pod \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\" (UID: \"66e30bb3-a829-4470-b8cf-7aaad1e2e472\") " Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.533982 4583 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.537906 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "66e30bb3-a829-4470-b8cf-7aaad1e2e472" (UID: "66e30bb3-a829-4470-b8cf-7aaad1e2e472"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.634604 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/66e30bb3-a829-4470-b8cf-7aaad1e2e472-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.957925 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.958167 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:01:26 crc kubenswrapper[4583]: I1209 15:01:26.988721 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:01:27 crc kubenswrapper[4583]: I1209 15:01:27.019584 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:01:27 crc kubenswrapper[4583]: I1209 15:01:27.197085 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"66e30bb3-a829-4470-b8cf-7aaad1e2e472","Type":"ContainerDied","Data":"9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80"} Dec 09 15:01:27 crc kubenswrapper[4583]: I1209 15:01:27.197412 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f7152a06504544cee39ee775f9443caa14389ce92c9e7642a1d18e49911db80" Dec 09 15:01:27 crc kubenswrapper[4583]: I1209 15:01:27.197106 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 09 15:01:27 crc kubenswrapper[4583]: I1209 15:01:27.221980 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.013330 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 09 15:01:28 crc kubenswrapper[4583]: E1209 15:01:28.013497 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e30bb3-a829-4470-b8cf-7aaad1e2e472" containerName="pruner" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.013508 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e30bb3-a829-4470-b8cf-7aaad1e2e472" containerName="pruner" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.013589 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e30bb3-a829-4470-b8cf-7aaad1e2e472" containerName="pruner" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.013898 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.015066 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.015738 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.047138 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.049388 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.049456 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.049535 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.150735 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.150808 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.150857 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.150808 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.150911 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock\") pod \"installer-9-crc\" (UID: 
\"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.164193 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access\") pod \"installer-9-crc\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.203140 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.324467 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:01:28 crc kubenswrapper[4583]: I1209 15:01:28.683910 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 09 15:01:29 crc kubenswrapper[4583]: I1209 15:01:29.208723 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2f8d7a42-b256-4e44-9763-9e8d22086fac","Type":"ContainerStarted","Data":"3a4d174633509229bfa7a24b0355d0e6c728c813f42dc92c20a69332b1fbbb78"} Dec 09 15:01:29 crc kubenswrapper[4583]: I1209 15:01:29.208949 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2f8d7a42-b256-4e44-9763-9e8d22086fac","Type":"ContainerStarted","Data":"ae7b507b00a6a23d9defb31cd92fc9826c979e951868ce83dfeda3bf32a5724b"} Dec 09 15:01:29 crc kubenswrapper[4583]: I1209 15:01:29.223242 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.223226292 podStartE2EDuration="1.223226292s" podCreationTimestamp="2025-12-09 15:01:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:01:29.220708786 +0000 UTC m=+187.428157875" watchObservedRunningTime="2025-12-09 15:01:29.223226292 +0000 UTC m=+187.430675380" Dec 09 15:01:29 crc kubenswrapper[4583]: I1209 15:01:29.391595 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:01:29 crc kubenswrapper[4583]: I1209 15:01:29.391643 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.036017 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" podUID="24d1746e-1acb-433b-b1ea-c1d1aa756176" containerName="oauth-openshift" containerID="cri-o://5d2a3f2b1e13243a777aebf844619783399b3cf6642bab4a28fbd2fc03db3911" gracePeriod=15 Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.289544 4583 generic.go:334] "Generic (PLEG): container finished" podID="24d1746e-1acb-433b-b1ea-c1d1aa756176" 
containerID="5d2a3f2b1e13243a777aebf844619783399b3cf6642bab4a28fbd2fc03db3911" exitCode=0 Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.289581 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" event={"ID":"24d1746e-1acb-433b-b1ea-c1d1aa756176","Type":"ContainerDied","Data":"5d2a3f2b1e13243a777aebf844619783399b3cf6642bab4a28fbd2fc03db3911"} Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.332111 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.355843 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-95988dd86-rpk6h"] Dec 09 15:01:52 crc kubenswrapper[4583]: E1209 15:01:52.356036 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24d1746e-1acb-433b-b1ea-c1d1aa756176" containerName="oauth-openshift" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.356054 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="24d1746e-1acb-433b-b1ea-c1d1aa756176" containerName="oauth-openshift" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.356152 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="24d1746e-1acb-433b-b1ea-c1d1aa756176" containerName="oauth-openshift" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.356457 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.369799 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-95988dd86-rpk6h"] Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376252 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-error\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376288 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376308 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-service-ca\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376342 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-policies\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " 
pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376367 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376383 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-session\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376401 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-router-certs\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376417 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376431 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tnb2\" (UniqueName: \"kubernetes.io/projected/d8743175-a055-48a0-ae41-f0f4f1a7a73a-kube-api-access-7tnb2\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376446 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-dir\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376467 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376494 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-cliconfig\") pod 
\"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376510 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-login\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.376538 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477804 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477857 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477897 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477917 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477945 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.477974 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478006 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478025 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478038 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478055 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478069 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478084 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jw8s\" (UniqueName: \"kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478100 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478123 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs\") pod \"24d1746e-1acb-433b-b1ea-c1d1aa756176\" (UID: \"24d1746e-1acb-433b-b1ea-c1d1aa756176\") " Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478256 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-error\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478280 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478298 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-service-ca\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478325 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-policies\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478348 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478365 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-session\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478381 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-router-certs\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478394 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tnb2\" (UniqueName: \"kubernetes.io/projected/d8743175-a055-48a0-ae41-f0f4f1a7a73a-kube-api-access-7tnb2\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478408 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-dir\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478423 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478444 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478471 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478475 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478493 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478485 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-login\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478581 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478647 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478665 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.478688 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-dir\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.479296 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.479369 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.479536 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-audit-policies\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.479569 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.480054 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-service-ca\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.480326 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.480772 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.482492 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.482720 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-error\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.482726 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.482755 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.482856 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.483061 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.484023 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s" (OuterVolumeSpecName: "kube-api-access-6jw8s") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "kube-api-access-6jw8s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.484550 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.484644 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.484647 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.484956 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-router-certs\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.485031 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-system-session\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.485105 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d8743175-a055-48a0-ae41-f0f4f1a7a73a-v4-0-config-user-template-login\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.485511 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.485794 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.485970 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.486084 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "24d1746e-1acb-433b-b1ea-c1d1aa756176" (UID: "24d1746e-1acb-433b-b1ea-c1d1aa756176"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.492014 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tnb2\" (UniqueName: \"kubernetes.io/projected/d8743175-a055-48a0-ae41-f0f4f1a7a73a-kube-api-access-7tnb2\") pod \"oauth-openshift-95988dd86-rpk6h\" (UID: \"d8743175-a055-48a0-ae41-f0f4f1a7a73a\") " pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579342 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579367 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jw8s\" (UniqueName: \"kubernetes.io/projected/24d1746e-1acb-433b-b1ea-c1d1aa756176-kube-api-access-6jw8s\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579377 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579388 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579399 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579407 4583 reconciler_common.go:293] 
"Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579415 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579423 4583 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579431 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579440 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579448 4583 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/24d1746e-1acb-433b-b1ea-c1d1aa756176-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.579456 4583 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/24d1746e-1acb-433b-b1ea-c1d1aa756176-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:01:52 crc kubenswrapper[4583]: I1209 15:01:52.674827 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.007452 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-95988dd86-rpk6h"] Dec 09 15:01:53 crc kubenswrapper[4583]: W1209 15:01:53.010224 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8743175_a055_48a0_ae41_f0f4f1a7a73a.slice/crio-aee4f1d961d060a64c602c88449f0e2806f77b42cfba0189bec8259536d351da WatchSource:0}: Error finding container aee4f1d961d060a64c602c88449f0e2806f77b42cfba0189bec8259536d351da: Status 404 returned error can't find the container with id aee4f1d961d060a64c602c88449f0e2806f77b42cfba0189bec8259536d351da Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.294718 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" event={"ID":"d8743175-a055-48a0-ae41-f0f4f1a7a73a","Type":"ContainerStarted","Data":"937ec21e6066008186e9c7c3d559f3182f2e8802bbc9cf3f48fc2531d2267010"} Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.294934 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.294944 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" event={"ID":"d8743175-a055-48a0-ae41-f0f4f1a7a73a","Type":"ContainerStarted","Data":"aee4f1d961d060a64c602c88449f0e2806f77b42cfba0189bec8259536d351da"} Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.297168 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" event={"ID":"24d1746e-1acb-433b-b1ea-c1d1aa756176","Type":"ContainerDied","Data":"9e49953d5116bb53f7781f1d67efff40ed63fc00accddda538ec71918787b24c"} Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.297232 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-phvcn" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.297256 4583 scope.go:117] "RemoveContainer" containerID="5d2a3f2b1e13243a777aebf844619783399b3cf6642bab4a28fbd2fc03db3911" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.323097 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" podStartSLOduration=26.323080037 podStartE2EDuration="26.323080037s" podCreationTimestamp="2025-12-09 15:01:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:01:53.310630966 +0000 UTC m=+211.518080054" watchObservedRunningTime="2025-12-09 15:01:53.323080037 +0000 UTC m=+211.530529124" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.327101 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.329548 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-phvcn"] Dec 09 15:01:53 crc kubenswrapper[4583]: E1209 15:01:53.338483 4583 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24d1746e_1acb_433b_b1ea_c1d1aa756176.slice\": RecentStats: unable to find data in memory cache]" Dec 09 15:01:53 crc kubenswrapper[4583]: I1209 15:01:53.471377 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-95988dd86-rpk6h" Dec 09 15:01:54 crc kubenswrapper[4583]: I1209 15:01:54.084058 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24d1746e-1acb-433b-b1ea-c1d1aa756176" path="/var/lib/kubelet/pods/24d1746e-1acb-433b-b1ea-c1d1aa756176/volumes" Dec 09 15:01:59 crc kubenswrapper[4583]: I1209 15:01:59.391882 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:01:59 crc kubenswrapper[4583]: I1209 15:01:59.392093 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:01:59 crc kubenswrapper[4583]: I1209 15:01:59.392132 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:01:59 crc kubenswrapper[4583]: I1209 15:01:59.392542 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:01:59 crc kubenswrapper[4583]: I1209 15:01:59.392702 4583 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815" gracePeriod=600 Dec 09 15:02:00 crc kubenswrapper[4583]: I1209 15:02:00.326575 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815" exitCode=0 Dec 09 15:02:00 crc kubenswrapper[4583]: I1209 15:02:00.326603 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815"} Dec 09 15:02:00 crc kubenswrapper[4583]: I1209 15:02:00.326873 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0"} Dec 09 15:02:00 crc kubenswrapper[4583]: I1209 15:02:00.990350 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:02:00 crc kubenswrapper[4583]: I1209 15:02:00.990723 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-92sgh" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="registry-server" containerID="cri-o://c3ebc45d1291e85361c66fb94ab746ac156bd9e8fc6eb1f0d6cdf3daba95325c" gracePeriod=30 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.000220 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.000424 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wpmv6" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="registry-server" containerID="cri-o://bbfd10fb1ec1fd940fe2ede82c64c663b7df1a4dd5eb72b9e0d53a42db2fcb9b" gracePeriod=30 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.004098 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.004274 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" containerID="cri-o://ee4dc382e3a5f4610479338d2283267a9ef3c2c7765d601606b5259aa6386a52" gracePeriod=30 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.008822 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.008966 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fjmdc" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="registry-server" containerID="cri-o://7ebb06978b71f4541a39226f7e824946bfe334d0794a6205d0585be7249990c8" gracePeriod=30 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.012225 4583 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-d7mjv"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.012784 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.020227 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.020498 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9rxmd" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="registry-server" containerID="cri-o://238104d3cd5d5e80e46f5665e346726f75affe4473cdb7bed58e1bc157301ec2" gracePeriod=30 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.032906 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d7mjv"] Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.156157 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.156236 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.156271 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xblgr\" (UniqueName: \"kubernetes.io/projected/51e4c62b-29d0-4662-916c-5c9bfbd33598-kube-api-access-xblgr\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.257670 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.257780 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xblgr\" (UniqueName: \"kubernetes.io/projected/51e4c62b-29d0-4662-916c-5c9bfbd33598-kube-api-access-xblgr\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.257820 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: 
\"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.258886 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.264920 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/51e4c62b-29d0-4662-916c-5c9bfbd33598-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.270910 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xblgr\" (UniqueName: \"kubernetes.io/projected/51e4c62b-29d0-4662-916c-5c9bfbd33598-kube-api-access-xblgr\") pod \"marketplace-operator-79b997595-d7mjv\" (UID: \"51e4c62b-29d0-4662-916c-5c9bfbd33598\") " pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.335973 4583 generic.go:334] "Generic (PLEG): container finished" podID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerID="bbfd10fb1ec1fd940fe2ede82c64c663b7df1a4dd5eb72b9e0d53a42db2fcb9b" exitCode=0 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.336050 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerDied","Data":"bbfd10fb1ec1fd940fe2ede82c64c663b7df1a4dd5eb72b9e0d53a42db2fcb9b"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.337450 4583 generic.go:334] "Generic (PLEG): container finished" podID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerID="ee4dc382e3a5f4610479338d2283267a9ef3c2c7765d601606b5259aa6386a52" exitCode=0 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.337503 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" event={"ID":"8f0567dd-f660-4e85-bc98-2e3b60b6a34c","Type":"ContainerDied","Data":"ee4dc382e3a5f4610479338d2283267a9ef3c2c7765d601606b5259aa6386a52"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.339321 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.339701 4583 generic.go:334] "Generic (PLEG): container finished" podID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerID="238104d3cd5d5e80e46f5665e346726f75affe4473cdb7bed58e1bc157301ec2" exitCode=0 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.339770 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerDied","Data":"238104d3cd5d5e80e46f5665e346726f75affe4473cdb7bed58e1bc157301ec2"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.343306 4583 generic.go:334] "Generic (PLEG): container finished" podID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerID="7ebb06978b71f4541a39226f7e824946bfe334d0794a6205d0585be7249990c8" exitCode=0 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.343371 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerDied","Data":"7ebb06978b71f4541a39226f7e824946bfe334d0794a6205d0585be7249990c8"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.357384 4583 generic.go:334] "Generic (PLEG): container finished" podID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerID="c3ebc45d1291e85361c66fb94ab746ac156bd9e8fc6eb1f0d6cdf3daba95325c" exitCode=0 Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.357437 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerDied","Data":"c3ebc45d1291e85361c66fb94ab746ac156bd9e8fc6eb1f0d6cdf3daba95325c"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.357461 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92sgh" event={"ID":"baf822f8-4771-4b04-b412-f84f5a7bad3a","Type":"ContainerDied","Data":"09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676"} Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.357471 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09c9705b7a1c367452028b547da1fc57db24a9757dfc00694ad4b92fc7b71676" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.431495 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.436231 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.447628 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.461161 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.463329 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.559964 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca\") pod \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560015 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd6cn\" (UniqueName: \"kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn\") pod \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560049 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc5hs\" (UniqueName: \"kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs\") pod \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560078 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities\") pod \"baf822f8-4771-4b04-b412-f84f5a7bad3a\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560104 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities\") pod \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560122 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcrj7\" (UniqueName: \"kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7\") pod \"baf822f8-4771-4b04-b412-f84f5a7bad3a\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560137 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content\") pod \"baf822f8-4771-4b04-b412-f84f5a7bad3a\" (UID: \"baf822f8-4771-4b04-b412-f84f5a7bad3a\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560165 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcxvl\" (UniqueName: \"kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl\") pod \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560193 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities\") pod \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560211 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics\") pod \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\" (UID: \"8f0567dd-f660-4e85-bc98-2e3b60b6a34c\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560234 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content\") pod \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\" (UID: \"71b4c55d-00ff-430c-9a13-5a5ad88e8231\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560259 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content\") pod \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\" (UID: \"9154b98b-4bb4-4c44-ba5a-9ae277a083e4\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560663 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities" (OuterVolumeSpecName: "utilities") pod "71b4c55d-00ff-430c-9a13-5a5ad88e8231" (UID: "71b4c55d-00ff-430c-9a13-5a5ad88e8231"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.560704 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "8f0567dd-f660-4e85-bc98-2e3b60b6a34c" (UID: "8f0567dd-f660-4e85-bc98-2e3b60b6a34c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.561052 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities" (OuterVolumeSpecName: "utilities") pod "baf822f8-4771-4b04-b412-f84f5a7bad3a" (UID: "baf822f8-4771-4b04-b412-f84f5a7bad3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.563484 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn" (OuterVolumeSpecName: "kube-api-access-jd6cn") pod "9154b98b-4bb4-4c44-ba5a-9ae277a083e4" (UID: "9154b98b-4bb4-4c44-ba5a-9ae277a083e4"). InnerVolumeSpecName "kube-api-access-jd6cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.563511 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs" (OuterVolumeSpecName: "kube-api-access-fc5hs") pod "71b4c55d-00ff-430c-9a13-5a5ad88e8231" (UID: "71b4c55d-00ff-430c-9a13-5a5ad88e8231"). InnerVolumeSpecName "kube-api-access-fc5hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.563647 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "8f0567dd-f660-4e85-bc98-2e3b60b6a34c" (UID: "8f0567dd-f660-4e85-bc98-2e3b60b6a34c"). 
InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.565073 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7" (OuterVolumeSpecName: "kube-api-access-zcrj7") pod "baf822f8-4771-4b04-b412-f84f5a7bad3a" (UID: "baf822f8-4771-4b04-b412-f84f5a7bad3a"). InnerVolumeSpecName "kube-api-access-zcrj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.565236 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities" (OuterVolumeSpecName: "utilities") pod "9154b98b-4bb4-4c44-ba5a-9ae277a083e4" (UID: "9154b98b-4bb4-4c44-ba5a-9ae277a083e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.566258 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl" (OuterVolumeSpecName: "kube-api-access-kcxvl") pod "8f0567dd-f660-4e85-bc98-2e3b60b6a34c" (UID: "8f0567dd-f660-4e85-bc98-2e3b60b6a34c"). InnerVolumeSpecName "kube-api-access-kcxvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.580799 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71b4c55d-00ff-430c-9a13-5a5ad88e8231" (UID: "71b4c55d-00ff-430c-9a13-5a5ad88e8231"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.607242 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "baf822f8-4771-4b04-b412-f84f5a7bad3a" (UID: "baf822f8-4771-4b04-b412-f84f5a7bad3a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.609899 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9154b98b-4bb4-4c44-ba5a-9ae277a083e4" (UID: "9154b98b-4bb4-4c44-ba5a-9ae277a083e4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.661699 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content\") pod \"9c53c4b3-06a8-411e-9273-91b956969fb7\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.661828 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hr4q\" (UniqueName: \"kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q\") pod \"9c53c4b3-06a8-411e-9273-91b956969fb7\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.661855 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities\") pod \"9c53c4b3-06a8-411e-9273-91b956969fb7\" (UID: \"9c53c4b3-06a8-411e-9273-91b956969fb7\") " Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662058 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662075 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcrj7\" (UniqueName: \"kubernetes.io/projected/baf822f8-4771-4b04-b412-f84f5a7bad3a-kube-api-access-zcrj7\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662084 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662092 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcxvl\" (UniqueName: \"kubernetes.io/projected/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-kube-api-access-kcxvl\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662101 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662109 4583 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662119 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b4c55d-00ff-430c-9a13-5a5ad88e8231-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662130 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662141 4583 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/8f0567dd-f660-4e85-bc98-2e3b60b6a34c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662149 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd6cn\" (UniqueName: \"kubernetes.io/projected/9154b98b-4bb4-4c44-ba5a-9ae277a083e4-kube-api-access-jd6cn\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662158 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc5hs\" (UniqueName: \"kubernetes.io/projected/71b4c55d-00ff-430c-9a13-5a5ad88e8231-kube-api-access-fc5hs\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662167 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf822f8-4771-4b04-b412-f84f5a7bad3a-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.662734 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities" (OuterVolumeSpecName: "utilities") pod "9c53c4b3-06a8-411e-9273-91b956969fb7" (UID: "9c53c4b3-06a8-411e-9273-91b956969fb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.663739 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q" (OuterVolumeSpecName: "kube-api-access-6hr4q") pod "9c53c4b3-06a8-411e-9273-91b956969fb7" (UID: "9c53c4b3-06a8-411e-9273-91b956969fb7"). InnerVolumeSpecName "kube-api-access-6hr4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.699274 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d7mjv"] Dec 09 15:02:01 crc kubenswrapper[4583]: W1209 15:02:01.703020 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51e4c62b_29d0_4662_916c_5c9bfbd33598.slice/crio-3f5666783576352a3baed12a09c4a163be63393717cbe834c560ac82549cc2ce WatchSource:0}: Error finding container 3f5666783576352a3baed12a09c4a163be63393717cbe834c560ac82549cc2ce: Status 404 returned error can't find the container with id 3f5666783576352a3baed12a09c4a163be63393717cbe834c560ac82549cc2ce Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.746477 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c53c4b3-06a8-411e-9273-91b956969fb7" (UID: "9c53c4b3-06a8-411e-9273-91b956969fb7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.762804 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hr4q\" (UniqueName: \"kubernetes.io/projected/9c53c4b3-06a8-411e-9273-91b956969fb7-kube-api-access-6hr4q\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.763139 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:01 crc kubenswrapper[4583]: I1209 15:02:01.763152 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c53c4b3-06a8-411e-9273-91b956969fb7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.361807 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" event={"ID":"51e4c62b-29d0-4662-916c-5c9bfbd33598","Type":"ContainerStarted","Data":"9ad81a885f3da26a9b792219ee179a5c9437c0f4da2adf760d68e24f0e2961a1"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.361842 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" event={"ID":"51e4c62b-29d0-4662-916c-5c9bfbd33598","Type":"ContainerStarted","Data":"3f5666783576352a3baed12a09c4a163be63393717cbe834c560ac82549cc2ce"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.362056 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.365109 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.365144 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rxmd" event={"ID":"9c53c4b3-06a8-411e-9273-91b956969fb7","Type":"ContainerDied","Data":"35b112174073274f9f8249ded1feb5961cdfb523c23aac0b7a4ad5eb3a5ef7c1"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.365158 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9rxmd" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.365175 4583 scope.go:117] "RemoveContainer" containerID="238104d3cd5d5e80e46f5665e346726f75affe4473cdb7bed58e1bc157301ec2" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.367341 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjmdc" event={"ID":"71b4c55d-00ff-430c-9a13-5a5ad88e8231","Type":"ContainerDied","Data":"b99198f52d59806de7ac831c8570c309652a4667e18af9313e78707e9c4b1d8b"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.367424 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjmdc" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.369482 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wpmv6" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.369498 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpmv6" event={"ID":"9154b98b-4bb4-4c44-ba5a-9ae277a083e4","Type":"ContainerDied","Data":"bd1793d0e06ea5d914bbd7ecaa5d44c5a26193a9eee06d857e2b4708ee654209"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.370535 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-92sgh" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.370776 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" event={"ID":"8f0567dd-f660-4e85-bc98-2e3b60b6a34c","Type":"ContainerDied","Data":"6800e34f36cf54d5a1547f3dd2e3d3ac05f3f9a066aa5af2c76955f9c1a828e0"} Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.370781 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nfx8b" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.375282 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d7mjv" podStartSLOduration=2.375271056 podStartE2EDuration="2.375271056s" podCreationTimestamp="2025-12-09 15:02:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:02:02.373679588 +0000 UTC m=+220.581128676" watchObservedRunningTime="2025-12-09 15:02:02.375271056 +0000 UTC m=+220.582720144" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.377875 4583 scope.go:117] "RemoveContainer" containerID="ca2e52e4d83a64f59cde887bc204b883c2891e398a6c7dad199854a02c7a1b9f" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.399314 4583 scope.go:117] "RemoveContainer" containerID="eb6dc8d24129e983b205860c8310e80e78fe213059cc6f3d446ce2fa21e2d115" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.417140 4583 scope.go:117] "RemoveContainer" containerID="7ebb06978b71f4541a39226f7e824946bfe334d0794a6205d0585be7249990c8" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.417920 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.420151 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9rxmd"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.426701 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.436599 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjmdc"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.438479 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.439761 4583 scope.go:117] "RemoveContainer" containerID="3760c142223acd98eabfde24eaf3c127a8499b3d2e578f1badf9fcf74ef29780" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.443227 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-92sgh"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.444897 4583 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.446732 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nfx8b"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.448599 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.450332 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wpmv6"] Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.452056 4583 scope.go:117] "RemoveContainer" containerID="91b86a545ea0229900da80ef81fe5ac48e3ee6dfc2c7f3ac093eb9d98ca14828" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.462777 4583 scope.go:117] "RemoveContainer" containerID="bbfd10fb1ec1fd940fe2ede82c64c663b7df1a4dd5eb72b9e0d53a42db2fcb9b" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.471823 4583 scope.go:117] "RemoveContainer" containerID="e9d530425ba82ad1ef934b90f7a5610d2dc0f665ae64c2fdcaab3bf436b92b72" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.485775 4583 scope.go:117] "RemoveContainer" containerID="2cf7f2a1ff512c04aa551945e043b09291bed9e0f31eaa8c97146b22636dc661" Dec 09 15:02:02 crc kubenswrapper[4583]: I1209 15:02:02.495675 4583 scope.go:117] "RemoveContainer" containerID="ee4dc382e3a5f4610479338d2283267a9ef3c2c7765d601606b5259aa6386a52" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199278 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzxcc"] Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199589 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199599 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199608 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199613 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199621 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199626 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199631 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199636 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199643 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="extract-utilities" Dec 09 15:02:03 crc 
kubenswrapper[4583]: I1209 15:02:03.199648 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199656 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199661 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199668 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199674 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199681 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199686 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199692 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199697 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199704 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199709 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" containerName="extract-utilities" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199715 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199720 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199726 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199731 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: E1209 15:02:03.199769 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199775 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="extract-content" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199855 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" 
containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199864 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" containerName="marketplace-operator" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199870 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199877 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.199886 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" containerName="registry-server" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.200429 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.202396 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.205616 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzxcc"] Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.278368 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-utilities\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.278458 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-catalog-content\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.278529 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkgmt\" (UniqueName: \"kubernetes.io/projected/cbdedc1b-dd30-46cf-b517-3c3f626560a3-kube-api-access-dkgmt\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.379110 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkgmt\" (UniqueName: \"kubernetes.io/projected/cbdedc1b-dd30-46cf-b517-3c3f626560a3-kube-api-access-dkgmt\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.379168 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-utilities\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.379516 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-catalog-content\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.380041 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-utilities\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.380236 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbdedc1b-dd30-46cf-b517-3c3f626560a3-catalog-content\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.396582 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkgmt\" (UniqueName: \"kubernetes.io/projected/cbdedc1b-dd30-46cf-b517-3c3f626560a3-kube-api-access-dkgmt\") pod \"redhat-marketplace-wzxcc\" (UID: \"cbdedc1b-dd30-46cf-b517-3c3f626560a3\") " pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.401856 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c46x5"] Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.402621 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.404095 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.407384 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c46x5"] Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.480997 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-utilities\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.481067 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p29sw\" (UniqueName: \"kubernetes.io/projected/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-kube-api-access-p29sw\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.481115 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-catalog-content\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.515881 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.581989 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p29sw\" (UniqueName: \"kubernetes.io/projected/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-kube-api-access-p29sw\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.582056 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-catalog-content\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.582110 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-utilities\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.582671 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-utilities\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.582715 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-catalog-content\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.596959 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p29sw\" (UniqueName: \"kubernetes.io/projected/2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c-kube-api-access-p29sw\") pod \"redhat-operators-c46x5\" (UID: \"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c\") " pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.727964 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:03 crc kubenswrapper[4583]: I1209 15:02:03.852141 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzxcc"] Dec 09 15:02:03 crc kubenswrapper[4583]: W1209 15:02:03.858043 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbdedc1b_dd30_46cf_b517_3c3f626560a3.slice/crio-5bd6a98803a2cdf4650bbf81ac12c0359f63dd75778e96eeaba31a343631c2f1 WatchSource:0}: Error finding container 5bd6a98803a2cdf4650bbf81ac12c0359f63dd75778e96eeaba31a343631c2f1: Status 404 returned error can't find the container with id 5bd6a98803a2cdf4650bbf81ac12c0359f63dd75778e96eeaba31a343631c2f1 Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.050427 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c46x5"] Dec 09 15:02:04 crc kubenswrapper[4583]: W1209 15:02:04.054931 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dfe0b67_0d5e_406e_bbd3_de46a80d2a8c.slice/crio-605b001b62313c28334f51fcf6101aab70a76cfa21742ae2558b3ea3631632be WatchSource:0}: Error finding container 605b001b62313c28334f51fcf6101aab70a76cfa21742ae2558b3ea3631632be: Status 404 returned error can't find the container with id 605b001b62313c28334f51fcf6101aab70a76cfa21742ae2558b3ea3631632be Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.083474 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71b4c55d-00ff-430c-9a13-5a5ad88e8231" path="/var/lib/kubelet/pods/71b4c55d-00ff-430c-9a13-5a5ad88e8231/volumes" Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.084201 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f0567dd-f660-4e85-bc98-2e3b60b6a34c" path="/var/lib/kubelet/pods/8f0567dd-f660-4e85-bc98-2e3b60b6a34c/volumes" Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.084602 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9154b98b-4bb4-4c44-ba5a-9ae277a083e4" path="/var/lib/kubelet/pods/9154b98b-4bb4-4c44-ba5a-9ae277a083e4/volumes" Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.085704 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c53c4b3-06a8-411e-9273-91b956969fb7" path="/var/lib/kubelet/pods/9c53c4b3-06a8-411e-9273-91b956969fb7/volumes" Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.086294 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baf822f8-4771-4b04-b412-f84f5a7bad3a" path="/var/lib/kubelet/pods/baf822f8-4771-4b04-b412-f84f5a7bad3a/volumes" Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.383613 4583 generic.go:334] "Generic (PLEG): container finished" podID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" containerID="df5be4efc8270f22013032afb9b4cfac0e0005160823cce12d1c86d43a0a7a08" exitCode=0 Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.383692 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c46x5" event={"ID":"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c","Type":"ContainerDied","Data":"df5be4efc8270f22013032afb9b4cfac0e0005160823cce12d1c86d43a0a7a08"} Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.383717 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c46x5" 
event={"ID":"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c","Type":"ContainerStarted","Data":"605b001b62313c28334f51fcf6101aab70a76cfa21742ae2558b3ea3631632be"} Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.385059 4583 generic.go:334] "Generic (PLEG): container finished" podID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" containerID="85147457f69801801495bef65d51126d9f5208e3f3bba014727413627821dbbc" exitCode=0 Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.385139 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzxcc" event={"ID":"cbdedc1b-dd30-46cf-b517-3c3f626560a3","Type":"ContainerDied","Data":"85147457f69801801495bef65d51126d9f5208e3f3bba014727413627821dbbc"} Dec 09 15:02:04 crc kubenswrapper[4583]: I1209 15:02:04.385184 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzxcc" event={"ID":"cbdedc1b-dd30-46cf-b517-3c3f626560a3","Type":"ContainerStarted","Data":"5bd6a98803a2cdf4650bbf81ac12c0359f63dd75778e96eeaba31a343631c2f1"} Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.391579 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c46x5" event={"ID":"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c","Type":"ContainerStarted","Data":"f4ca9e9e66fdef95f95bc665d66f364d4c56ce29f27b7f4c0e70187112ac8066"} Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.396417 4583 generic.go:334] "Generic (PLEG): container finished" podID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" containerID="cebc30d688c16ba52db11924e5be5a295f15d10110ba32f42c7514dc5f004069" exitCode=0 Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.396441 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzxcc" event={"ID":"cbdedc1b-dd30-46cf-b517-3c3f626560a3","Type":"ContainerDied","Data":"cebc30d688c16ba52db11924e5be5a295f15d10110ba32f42c7514dc5f004069"} Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.597577 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kjb9f"] Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.598406 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.600215 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.603861 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjb9f"] Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.706934 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6dw6\" (UniqueName: \"kubernetes.io/projected/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-kube-api-access-l6dw6\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.707105 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-catalog-content\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.707216 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-utilities\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.798926 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2crjx"] Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.800091 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.802014 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.805484 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2crjx"] Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.807994 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-utilities\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.808032 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6dw6\" (UniqueName: \"kubernetes.io/projected/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-kube-api-access-l6dw6\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.808070 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-catalog-content\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.808764 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-catalog-content\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.808842 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-utilities\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.828902 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6dw6\" (UniqueName: \"kubernetes.io/projected/e305bf53-d531-4bbf-a6a7-38ad3a3c7189-kube-api-access-l6dw6\") pod \"certified-operators-kjb9f\" (UID: \"e305bf53-d531-4bbf-a6a7-38ad3a3c7189\") " pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.908808 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-catalog-content\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.908856 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8l7r\" (UniqueName: \"kubernetes.io/projected/cf53a407-5aab-44dc-a635-a8e2f68443cf-kube-api-access-m8l7r\") pod \"community-operators-2crjx\" (UID: 
\"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.909009 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-utilities\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:05 crc kubenswrapper[4583]: I1209 15:02:05.909992 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.010287 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-catalog-content\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.010332 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8l7r\" (UniqueName: \"kubernetes.io/projected/cf53a407-5aab-44dc-a635-a8e2f68443cf-kube-api-access-m8l7r\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.010391 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-utilities\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.010726 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-catalog-content\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.010785 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf53a407-5aab-44dc-a635-a8e2f68443cf-utilities\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.025265 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8l7r\" (UniqueName: \"kubernetes.io/projected/cf53a407-5aab-44dc-a635-a8e2f68443cf-kube-api-access-m8l7r\") pod \"community-operators-2crjx\" (UID: \"cf53a407-5aab-44dc-a635-a8e2f68443cf\") " pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.118245 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.238496 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjb9f"] Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.395486 4583 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.395978 4583 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396185 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96" gracePeriod=15 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396312 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396333 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011" gracePeriod=15 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396331 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16" gracePeriod=15 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396374 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d" gracePeriod=15 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.396343 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578" gracePeriod=15 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397285 4583 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397469 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397480 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397489 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397494 4583 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397501 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397506 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397517 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397522 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397530 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397535 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397542 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397547 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.397553 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397558 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397622 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397632 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397639 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397645 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397652 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.397819 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.404766 4583 generic.go:334] "Generic 
(PLEG): container finished" podID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" containerID="f4ca9e9e66fdef95f95bc665d66f364d4c56ce29f27b7f4c0e70187112ac8066" exitCode=0 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.404917 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c46x5" event={"ID":"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c","Type":"ContainerDied","Data":"f4ca9e9e66fdef95f95bc665d66f364d4c56ce29f27b7f4c0e70187112ac8066"} Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.407804 4583 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.408407 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzxcc" event={"ID":"cbdedc1b-dd30-46cf-b517-3c3f626560a3","Type":"ContainerStarted","Data":"4394b9ffee25c85c28e7a62bb13a12821f5ebee884abc6048c1a7b6aa7f7dba0"} Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.410592 4583 generic.go:334] "Generic (PLEG): container finished" podID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" containerID="d0f41985562582130df9f62f0f75ab285250c720cc50ed04589b06312a1484dd" exitCode=0 Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.410615 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjb9f" event={"ID":"e305bf53-d531-4bbf-a6a7-38ad3a3c7189","Type":"ContainerDied","Data":"d0f41985562582130df9f62f0f75ab285250c720cc50ed04589b06312a1484dd"} Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.410629 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjb9f" event={"ID":"e305bf53-d531-4bbf-a6a7-38ad3a3c7189","Type":"ContainerStarted","Data":"5437f9f8d0f687a20493848042504fd00449fc540487502fe0e9683fe25924e4"} Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420804 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420852 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420882 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420909 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" 
(UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420927 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.420983 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.421032 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.421590 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.434858 4583 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.26.88:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.523612 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524041 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524067 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524085 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524119 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524136 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524154 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524165 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524221 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.523727 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524259 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524276 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524294 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524312 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524328 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.524344 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.614394 4583 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 09 15:02:06 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd" Netns:"/var/run/netns/5330febc-c8f5-4f12-a207-3a54b9e80e01" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 15:02:06 crc kubenswrapper[4583]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:06 crc kubenswrapper[4583]: > Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.614488 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 09 15:02:06 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox 
k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd" Netns:"/var/run/netns/5330febc-c8f5-4f12-a207-3a54b9e80e01" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 15:02:06 crc kubenswrapper[4583]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:06 crc kubenswrapper[4583]: > pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.614508 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 09 15:02:06 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd" Netns:"/var/run/netns/5330febc-c8f5-4f12-a207-3a54b9e80e01" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 
192.168.26.88:6443: connect: connection refused Dec 09 15:02:06 crc kubenswrapper[4583]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:06 crc kubenswrapper[4583]: > pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:06 crc kubenswrapper[4583]: E1209 15:02:06.614561 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"community-operators-2crjx_openshift-marketplace(cf53a407-5aab-44dc-a635-a8e2f68443cf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"community-operators-2crjx_openshift-marketplace(cf53a407-5aab-44dc-a635-a8e2f68443cf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd): error adding pod openshift-marketplace_community-operators-2crjx to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd\\\" Netns:\\\"/var/run/netns/5330febc-c8f5-4f12-a207-3a54b9e80e01\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=29a647ea793a7ce08cb2cf571499fbd7350eb2547e53512cced95346f07f10fd;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s\\\": dial tcp 192.168.26.88:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/community-operators-2crjx" podUID="cf53a407-5aab-44dc-a635-a8e2f68443cf" Dec 09 15:02:06 crc kubenswrapper[4583]: I1209 15:02:06.736187 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:06 crc kubenswrapper[4583]: W1209 15:02:06.748509 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-6555b708f49305157a447db54c51056f64fea7df4c8bbdd6cbe7376751388e96 WatchSource:0}: Error finding container 6555b708f49305157a447db54c51056f64fea7df4c8bbdd6cbe7376751388e96: Status 404 returned error can't find the container with id 6555b708f49305157a447db54c51056f64fea7df4c8bbdd6cbe7376751388e96 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.430883 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c46x5" event={"ID":"2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c","Type":"ContainerStarted","Data":"a2a28d244daeaa376bb89a233b82d1d1cc160195de7c45d1d947d02ba873efcb"} Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.432112 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7da33174343423eb47e7bd21b7c565bcb945001ceb2f859d7b4e479426e27767"} Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.432315 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6555b708f49305157a447db54c51056f64fea7df4c8bbdd6cbe7376751388e96"} Dec 09 15:02:07 crc kubenswrapper[4583]: E1209 15:02:07.432864 4583 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.26.88:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.433854 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.434897 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.435400 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011" exitCode=0 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.435420 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d" exitCode=0 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.435428 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578" exitCode=0 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.435436 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16" exitCode=2 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.435495 4583 scope.go:117] "RemoveContainer" 
containerID="3c9501d701e3c850e7584eb9aba4363349c94a99cd3fc92089db61190b988393" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.440983 4583 generic.go:334] "Generic (PLEG): container finished" podID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" containerID="9a08720ac3055bdca6d8b92933166d4703a544532daed51bb936cd03e6c245ae" exitCode=0 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.441052 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjb9f" event={"ID":"e305bf53-d531-4bbf-a6a7-38ad3a3c7189","Type":"ContainerDied","Data":"9a08720ac3055bdca6d8b92933166d4703a544532daed51bb936cd03e6c245ae"} Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.442877 4583 generic.go:334] "Generic (PLEG): container finished" podID="2f8d7a42-b256-4e44-9763-9e8d22086fac" containerID="3a4d174633509229bfa7a24b0355d0e6c728c813f42dc92c20a69332b1fbbb78" exitCode=0 Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.442932 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.443161 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:07 crc kubenswrapper[4583]: I1209 15:02:07.443311 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2f8d7a42-b256-4e44-9763-9e8d22086fac","Type":"ContainerDied","Data":"3a4d174633509229bfa7a24b0355d0e6c728c813f42dc92c20a69332b1fbbb78"} Dec 09 15:02:07 crc kubenswrapper[4583]: E1209 15:02:07.955057 4583 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 09 15:02:07 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87" Netns:"/var/run/netns/bf0d712d-587c-4319-8459-c7606af5658a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 15:02:07 crc kubenswrapper[4583]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:07 crc kubenswrapper[4583]: > Dec 09 15:02:07 crc kubenswrapper[4583]: E1209 15:02:07.955257 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 09 15:02:07 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87" Netns:"/var/run/netns/bf0d712d-587c-4319-8459-c7606af5658a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 15:02:07 crc kubenswrapper[4583]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:07 crc kubenswrapper[4583]: > pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:07 crc kubenswrapper[4583]: E1209 15:02:07.955277 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 09 15:02:07 crc kubenswrapper[4583]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87): error adding pod openshift-marketplace_community-operators-2crjx to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87" Netns:"/var/run/netns/bf0d712d-587c-4319-8459-c7606af5658a" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf" Path:"" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s": dial tcp 192.168.26.88:6443: connect: connection refused Dec 09 15:02:07 crc kubenswrapper[4583]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 09 15:02:07 crc kubenswrapper[4583]: > pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:07 crc kubenswrapper[4583]: E1209 15:02:07.955358 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"community-operators-2crjx_openshift-marketplace(cf53a407-5aab-44dc-a635-a8e2f68443cf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"community-operators-2crjx_openshift-marketplace(cf53a407-5aab-44dc-a635-a8e2f68443cf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-2crjx_openshift-marketplace_cf53a407-5aab-44dc-a635-a8e2f68443cf_0(d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87): error adding pod openshift-marketplace_community-operators-2crjx to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87\\\" Netns:\\\"/var/run/netns/bf0d712d-587c-4319-8459-c7606af5658a\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2crjx;K8S_POD_INFRA_CONTAINER_ID=d21d67febd0197a8bf4afad4238da9f94274a98c1dc38f64845894725ea48e87;K8S_POD_UID=cf53a407-5aab-44dc-a635-a8e2f68443cf\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/community-operators-2crjx] networking: Multus: [openshift-marketplace/community-operators-2crjx/cf53a407-5aab-44dc-a635-a8e2f68443cf]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod community-operators-2crjx in out of cluster comm: SetNetworkStatus: failed to update the pod community-operators-2crjx in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2crjx?timeout=1m0s\\\": dial tcp 192.168.26.88:6443: connect: connection refused\\n': StdinData: 
{\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/community-operators-2crjx" podUID="cf53a407-5aab-44dc-a635-a8e2f68443cf" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.449081 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjb9f" event={"ID":"e305bf53-d531-4bbf-a6a7-38ad3a3c7189","Type":"ContainerStarted","Data":"2ad5850964e2a8b999b9970e04dd7fba268c9d9191c211936e4d0a6aaef086e8"} Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.452477 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.714246 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.714881 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.715082 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747527 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2f8d7a42-b256-4e44-9763-9e8d22086fac" (UID: "2f8d7a42-b256-4e44-9763-9e8d22086fac"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747567 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir\") pod \"2f8d7a42-b256-4e44-9763-9e8d22086fac\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747605 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747659 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747691 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock\") pod \"2f8d7a42-b256-4e44-9763-9e8d22086fac\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747730 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock" (OuterVolumeSpecName: "var-lock") pod "2f8d7a42-b256-4e44-9763-9e8d22086fac" (UID: "2f8d7a42-b256-4e44-9763-9e8d22086fac"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747871 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747937 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747961 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.747980 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access\") pod \"2f8d7a42-b256-4e44-9763-9e8d22086fac\" (UID: \"2f8d7a42-b256-4e44-9763-9e8d22086fac\") " Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.748034 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.749390 4583 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-var-lock\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.749572 4583 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.749580 4583 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.749589 4583 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2f8d7a42-b256-4e44-9763-9e8d22086fac-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.749650 4583 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.752204 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2f8d7a42-b256-4e44-9763-9e8d22086fac" (UID: "2f8d7a42-b256-4e44-9763-9e8d22086fac"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:02:08 crc kubenswrapper[4583]: I1209 15:02:08.850086 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f8d7a42-b256-4e44-9763-9e8d22086fac-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.457451 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2f8d7a42-b256-4e44-9763-9e8d22086fac","Type":"ContainerDied","Data":"ae7b507b00a6a23d9defb31cd92fc9826c979e951868ce83dfeda3bf32a5724b"} Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.457494 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae7b507b00a6a23d9defb31cd92fc9826c979e951868ce83dfeda3bf32a5724b" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.457464 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.459631 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.460216 4583 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96" exitCode=0 Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.460314 4583 scope.go:117] "RemoveContainer" containerID="ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.460336 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.473718 4583 scope.go:117] "RemoveContainer" containerID="d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.483420 4583 scope.go:117] "RemoveContainer" containerID="6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.497770 4583 scope.go:117] "RemoveContainer" containerID="abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.508606 4583 scope.go:117] "RemoveContainer" containerID="d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.522087 4583 scope.go:117] "RemoveContainer" containerID="5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.535560 4583 scope.go:117] "RemoveContainer" containerID="ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.535890 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\": container with ID starting with ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011 not found: ID does not exist" containerID="ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.535924 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011"} err="failed to get container status \"ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\": rpc error: code = NotFound desc = could not find container \"ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011\": container with ID starting with ae3d298cae4f694f55809a11164e85bfbe1bd4fc41c02f15a20c3bc125cdb011 not found: ID does not exist" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.535945 4583 scope.go:117] "RemoveContainer" containerID="d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.536302 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\": container with ID starting with 
d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d not found: ID does not exist" containerID="d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.536334 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d"} err="failed to get container status \"d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\": rpc error: code = NotFound desc = could not find container \"d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d\": container with ID starting with d8ed4bf48c43bba227f89d3edb8a1e1baf32f04a33be6b20cbeb5709e49b091d not found: ID does not exist" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.536354 4583 scope.go:117] "RemoveContainer" containerID="6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.536557 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\": container with ID starting with 6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578 not found: ID does not exist" containerID="6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.536575 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578"} err="failed to get container status \"6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\": rpc error: code = NotFound desc = could not find container \"6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578\": container with ID starting with 6fdb6f3bf7a6df735925d2b25a1df64fcba640e63485e3c7e462143bc6f71578 not found: ID does not exist" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.536587 4583 scope.go:117] "RemoveContainer" containerID="abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.537325 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\": container with ID starting with abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16 not found: ID does not exist" containerID="abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.537354 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16"} err="failed to get container status \"abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\": rpc error: code = NotFound desc = could not find container \"abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16\": container with ID starting with abfac7a143868fb3fade676d7672a1d44bb5018e6ad7f4f21680bbefbc69cc16 not found: ID does not exist" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.537376 4583 scope.go:117] "RemoveContainer" containerID="d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.537616 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\": container with ID starting with d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96 not found: ID does not exist" containerID="d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.537651 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96"} err="failed to get container status \"d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\": rpc error: code = NotFound desc = could not find container \"d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96\": container with ID starting with d7626ee6ebc3d2fee4a97786e1a9063efa87ad7594accb9c2e5602516b14fb96 not found: ID does not exist" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.537667 4583 scope.go:117] "RemoveContainer" containerID="5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb" Dec 09 15:02:09 crc kubenswrapper[4583]: E1209 15:02:09.537920 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\": container with ID starting with 5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb not found: ID does not exist" containerID="5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb" Dec 09 15:02:09 crc kubenswrapper[4583]: I1209 15:02:09.537943 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb"} err="failed to get container status \"5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\": rpc error: code = NotFound desc = could not find container \"5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb\": container with ID starting with 5bdbc7d677895129f04f14d34df49e5482b36a369d090a4715951190125b66fb not found: ID does not exist" Dec 09 15:02:10 crc kubenswrapper[4583]: I1209 15:02:10.083581 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 09 15:02:11 crc kubenswrapper[4583]: E1209 15:02:11.170578 4583 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 192.168.26.88:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" volumeName="registry-storage" Dec 09 15:02:11 crc kubenswrapper[4583]: E1209 15:02:11.420623 4583 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 192.168.26.88:6443: connect: connection refused" event="&Event{ObjectMeta:{certified-operators-kjb9f.187f943329d9d4d8 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:certified-operators-kjb9f,UID:e305bf53-d531-4bbf-a6a7-38ad3a3c7189,APIVersion:v1,ResourceVersion:29504,FieldPath:spec.initContainers{extract-content},},Reason:Pulling,Message:Pulling image \"registry.redhat.io/redhat/certified-operator-index:v4.18\",Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-09 15:02:06.418908376 +0000 UTC m=+224.626357454,LastTimestamp:2025-12-09 15:02:06.418908376 +0000 UTC m=+224.626357454,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.430086 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.430328 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.430604 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.430790 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.430937 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:11 crc kubenswrapper[4583]: I1209 15:02:11.431085 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:12 crc kubenswrapper[4583]: I1209 15:02:12.079504 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:12 crc kubenswrapper[4583]: I1209 15:02:12.080015 4583 status_manager.go:851] "Failed to get status for pod" 
podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:12 crc kubenswrapper[4583]: I1209 15:02:12.080200 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:12 crc kubenswrapper[4583]: I1209 15:02:12.080439 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: E1209 15:02:13.283104 4583 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 192.168.26.88:6443: connect: connection refused" event="&Event{ObjectMeta:{certified-operators-kjb9f.187f943329d9d4d8 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:certified-operators-kjb9f,UID:e305bf53-d531-4bbf-a6a7-38ad3a3c7189,APIVersion:v1,ResourceVersion:29504,FieldPath:spec.initContainers{extract-content},},Reason:Pulling,Message:Pulling image \"registry.redhat.io/redhat/certified-operator-index:v4.18\",Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-09 15:02:06.418908376 +0000 UTC m=+224.626357454,LastTimestamp:2025-12-09 15:02:06.418908376 +0000 UTC m=+224.626357454,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.516712 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.516781 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.546571 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.546999 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.547256 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 
15:02:13.547510 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.547868 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.729381 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.729425 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.758378 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.758804 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.759351 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.759654 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:13 crc kubenswrapper[4583]: I1209 15:02:13.759874 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.503776 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c46x5" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.503845 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzxcc" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.504155 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.505100 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.505412 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.505635 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.505910 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.506121 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.506303 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:14 crc kubenswrapper[4583]: I1209 15:02:14.506465 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.910494 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.910533 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.936884 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.937215 4583 
status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.937570 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.937820 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:15 crc kubenswrapper[4583]: I1209 15:02:15.938036 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.445167 4583 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.445809 4583 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.446181 4583 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.446428 4583 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.446699 4583 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 15:02:16.446734 4583 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.446917 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="200ms" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 
15:02:16.511275 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kjb9f" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 15:02:16.511614 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 15:02:16.511879 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 15:02:16.512091 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: I1209 15:02:16.512332 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:16 crc kubenswrapper[4583]: E1209 15:02:16.647948 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="400ms" Dec 09 15:02:17 crc kubenswrapper[4583]: E1209 15:02:17.048712 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="800ms" Dec 09 15:02:17 crc kubenswrapper[4583]: E1209 15:02:17.849116 4583 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.88:6443: connect: connection refused" interval="1.6s" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.078591 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.079398 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.079949 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.080203 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.080426 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.089395 4583 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.089421 4583 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:18 crc kubenswrapper[4583]: E1209 15:02:18.089720 4583 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.090084 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.493631 4583 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="7ae34c778990ec9d6a687d02af3efc6bf682d99c52d3d6e74322b5190836942f" exitCode=0 Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.493723 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"7ae34c778990ec9d6a687d02af3efc6bf682d99c52d3d6e74322b5190836942f"} Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.493842 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ee67bf37a6d033af7c9bda0a9184cec99b9038ab559e980fda8d00f6f8a05f62"} Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.494042 4583 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.494059 4583 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.494264 4583 status_manager.go:851] "Failed to get status for pod" podUID="2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c" pod="openshift-marketplace/redhat-operators-c46x5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-c46x5\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: E1209 15:02:18.494280 4583 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.494476 4583 status_manager.go:851] "Failed to get status for pod" podUID="e305bf53-d531-4bbf-a6a7-38ad3a3c7189" pod="openshift-marketplace/certified-operators-kjb9f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kjb9f\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.494868 4583 status_manager.go:851] "Failed to get status for pod" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:18 crc kubenswrapper[4583]: I1209 15:02:18.495237 4583 status_manager.go:851] "Failed to get status for pod" podUID="cbdedc1b-dd30-46cf-b517-3c3f626560a3" pod="openshift-marketplace/redhat-marketplace-wzxcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wzxcc\": dial tcp 192.168.26.88:6443: connect: connection refused" Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501180 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"92e1431a2ae5a0ee754bf10bb03d351031e8e6341e8f930ea455511c29b2a750"} Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501222 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f223e61fdbdf178257ea8945a651ab5e5232dddc5fd16586ecab152149b17fd5"} Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501233 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"75d56500326a74ab4e4a96cbda0bd20eeb6ab158751f882b3f8fac1ee952184f"} Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501242 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"679e76a8ae20113b9c1a6736bfd504efc3b9398c9ac0263b561d08b6a6b411e1"} Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501252 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"69cdb3cb1aa228a363f0cfd4a72a6575c13ce00257233a710917f271dd002ace"} Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501413 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501471 4583 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:19 crc kubenswrapper[4583]: I1209 15:02:19.501484 4583 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:21 crc kubenswrapper[4583]: I1209 15:02:21.511574 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 09 15:02:21 crc kubenswrapper[4583]: I1209 15:02:21.511779 4583 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc" exitCode=1 Dec 09 15:02:21 crc kubenswrapper[4583]: I1209 15:02:21.511806 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc"} Dec 09 15:02:21 crc kubenswrapper[4583]: I1209 15:02:21.512136 4583 scope.go:117] "RemoveContainer" containerID="7a99866dce8ed67e11898daebec8214a6d499e1473bb9ba9a952a5e0b183a6cc" Dec 09 15:02:22 crc kubenswrapper[4583]: I1209 15:02:22.517460 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 09 15:02:22 crc kubenswrapper[4583]: I1209 15:02:22.517656 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6db210c27ac6947fc38797c962dec304383ef711089b1c14b32b0103830ef136"} Dec 09 15:02:22 crc kubenswrapper[4583]: I1209 15:02:22.577652 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 15:02:22 crc kubenswrapper[4583]: I1209 15:02:22.585234 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.078594 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.079067 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.091015 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.091049 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.094654 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:23 crc kubenswrapper[4583]: W1209 15:02:23.416275 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf53a407_5aab_44dc_a635_a8e2f68443cf.slice/crio-f257c702b6ba1aca4a07e9950b2fcccea0662cf13d6420d502219be69b85807d WatchSource:0}: Error finding container f257c702b6ba1aca4a07e9950b2fcccea0662cf13d6420d502219be69b85807d: Status 404 returned error can't find the container with id f257c702b6ba1aca4a07e9950b2fcccea0662cf13d6420d502219be69b85807d Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.522273 4583 generic.go:334] "Generic (PLEG): container finished" podID="cf53a407-5aab-44dc-a635-a8e2f68443cf" containerID="5730171ce4dc523294182367ec4497e2b184948d8221b476a499a4ba0b1d7703" exitCode=0 Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.522355 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2crjx" event={"ID":"cf53a407-5aab-44dc-a635-a8e2f68443cf","Type":"ContainerDied","Data":"5730171ce4dc523294182367ec4497e2b184948d8221b476a499a4ba0b1d7703"} Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.522394 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2crjx" event={"ID":"cf53a407-5aab-44dc-a635-a8e2f68443cf","Type":"ContainerStarted","Data":"f257c702b6ba1aca4a07e9950b2fcccea0662cf13d6420d502219be69b85807d"} Dec 09 15:02:23 crc kubenswrapper[4583]: I1209 15:02:23.522530 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 15:02:24 crc kubenswrapper[4583]: I1209 15:02:24.527884 4583 generic.go:334] "Generic (PLEG): container finished" podID="cf53a407-5aab-44dc-a635-a8e2f68443cf" containerID="56aacfd5ae79b95c5b68f658ee6b47abf9c2963c7dcff27301f46e43fbae508a" exitCode=0 Dec 09 15:02:24 crc kubenswrapper[4583]: I1209 15:02:24.527947 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-2crjx" event={"ID":"cf53a407-5aab-44dc-a635-a8e2f68443cf","Type":"ContainerDied","Data":"56aacfd5ae79b95c5b68f658ee6b47abf9c2963c7dcff27301f46e43fbae508a"} Dec 09 15:02:24 crc kubenswrapper[4583]: I1209 15:02:24.735181 4583 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:24 crc kubenswrapper[4583]: I1209 15:02:24.838508 4583 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c4ed1ae3-9c8f-4c88-b07e-2043d7cf3e71" Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.533313 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2crjx" event={"ID":"cf53a407-5aab-44dc-a635-a8e2f68443cf","Type":"ContainerStarted","Data":"120d21fbb1a0787234e11574f3ed7dfcf9b725e4f2c27709874081759da914ba"} Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.533546 4583 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.533567 4583 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.535523 4583 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c4ed1ae3-9c8f-4c88-b07e-2043d7cf3e71" Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.536421 4583 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://69cdb3cb1aa228a363f0cfd4a72a6575c13ce00257233a710917f271dd002ace" Dec 09 15:02:25 crc kubenswrapper[4583]: I1209 15:02:25.536446 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:26 crc kubenswrapper[4583]: I1209 15:02:26.119734 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:26 crc kubenswrapper[4583]: I1209 15:02:26.119813 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:26 crc kubenswrapper[4583]: I1209 15:02:26.537937 4583 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:26 crc kubenswrapper[4583]: I1209 15:02:26.538162 4583 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="29ac013b-8543-4f00-bc14-22441f765f91" Dec 09 15:02:26 crc kubenswrapper[4583]: I1209 15:02:26.539656 4583 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c4ed1ae3-9c8f-4c88-b07e-2043d7cf3e71" Dec 09 15:02:27 crc kubenswrapper[4583]: I1209 15:02:27.144178 4583 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-2crjx" podUID="cf53a407-5aab-44dc-a635-a8e2f68443cf" containerName="registry-server" probeResult="failure" output=< Dec 09 
15:02:27 crc kubenswrapper[4583]: timeout: failed to connect service ":50051" within 1s Dec 09 15:02:27 crc kubenswrapper[4583]: > Dec 09 15:02:31 crc kubenswrapper[4583]: I1209 15:02:31.465253 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 09 15:02:31 crc kubenswrapper[4583]: I1209 15:02:31.622619 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 09 15:02:33 crc kubenswrapper[4583]: I1209 15:02:33.264854 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 09 15:02:33 crc kubenswrapper[4583]: I1209 15:02:33.306234 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 09 15:02:34 crc kubenswrapper[4583]: I1209 15:02:34.092494 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 09 15:02:34 crc kubenswrapper[4583]: I1209 15:02:34.506329 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 09 15:02:34 crc kubenswrapper[4583]: I1209 15:02:34.530697 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 09 15:02:34 crc kubenswrapper[4583]: I1209 15:02:34.775265 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 09 15:02:35 crc kubenswrapper[4583]: I1209 15:02:35.730581 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 09 15:02:35 crc kubenswrapper[4583]: I1209 15:02:35.950282 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 09 15:02:35 crc kubenswrapper[4583]: I1209 15:02:35.980634 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.143274 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.167230 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2crjx" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.176287 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.252890 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.564559 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 09 15:02:36 crc kubenswrapper[4583]: I1209 15:02:36.773460 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.043469 4583 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.125613 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.274683 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.350183 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.845118 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.876557 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.921731 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 09 15:02:37 crc kubenswrapper[4583]: I1209 15:02:37.965278 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.066855 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.070288 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.141671 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.153970 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.331046 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.351156 4583 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.400574 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.676434 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.692476 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.816818 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.841007 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 09 15:02:38 crc kubenswrapper[4583]: I1209 15:02:38.960166 4583 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.151407 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.174608 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.261134 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.266156 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.310162 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.353616 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.478423 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.636510 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.697630 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.751499 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.935265 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 09 15:02:39 crc kubenswrapper[4583]: I1209 15:02:39.949423 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.041124 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.128944 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.356508 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.378866 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.400053 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.578799 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.585397 4583 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.700905 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.712008 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 09 15:02:40 crc kubenswrapper[4583]: I1209 15:02:40.934884 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.092675 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.096468 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.107664 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.211062 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.297259 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.382980 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.461896 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.492104 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.518697 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.614760 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.615998 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.616427 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.638088 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.706301 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.721174 4583 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.721224 4583 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.776335 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.856457 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.917110 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.927205 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 09 15:02:41 crc kubenswrapper[4583]: I1209 15:02:41.967453 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.017793 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.050037 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.091696 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.209856 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.212716 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.315848 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.338705 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.480406 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.508178 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.538601 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.599927 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.604669 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.757379 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.855058 4583 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.970589 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.979654 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 09 15:02:42 crc kubenswrapper[4583]: I1209 15:02:42.989701 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.161803 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.221501 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.268693 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.282577 4583 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.283314 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzxcc" podStartSLOduration=38.752771228 podStartE2EDuration="40.28330158s" podCreationTimestamp="2025-12-09 15:02:03 +0000 UTC" firstStartedPulling="2025-12-09 15:02:04.386228616 +0000 UTC m=+222.593677704" lastFinishedPulling="2025-12-09 15:02:05.916758979 +0000 UTC m=+224.124208056" observedRunningTime="2025-12-09 15:02:24.765375996 +0000 UTC m=+242.972825084" watchObservedRunningTime="2025-12-09 15:02:43.28330158 +0000 UTC m=+261.490750668" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.283500 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kjb9f" podStartSLOduration=36.797234323 podStartE2EDuration="38.283495486s" podCreationTimestamp="2025-12-09 15:02:05 +0000 UTC" firstStartedPulling="2025-12-09 15:02:06.418904048 +0000 UTC m=+224.626353136" lastFinishedPulling="2025-12-09 15:02:07.905165211 +0000 UTC m=+226.112614299" observedRunningTime="2025-12-09 15:02:24.746564594 +0000 UTC m=+242.954013683" watchObservedRunningTime="2025-12-09 15:02:43.283495486 +0000 UTC m=+261.490944574" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.284305 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2crjx" podStartSLOduration=36.79167035 podStartE2EDuration="38.284301478s" podCreationTimestamp="2025-12-09 15:02:05 +0000 UTC" firstStartedPulling="2025-12-09 15:02:23.523662194 +0000 UTC m=+241.731111282" lastFinishedPulling="2025-12-09 15:02:25.016293322 +0000 UTC m=+243.223742410" observedRunningTime="2025-12-09 15:02:25.548182468 +0000 UTC m=+243.755631557" watchObservedRunningTime="2025-12-09 15:02:43.284301478 +0000 UTC m=+261.491750566" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.285539 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c46x5" podStartSLOduration=37.727071157 podStartE2EDuration="40.285535971s" podCreationTimestamp="2025-12-09 
15:02:03 +0000 UTC" firstStartedPulling="2025-12-09 15:02:04.384762526 +0000 UTC m=+222.592211614" lastFinishedPulling="2025-12-09 15:02:06.943227339 +0000 UTC m=+225.150676428" observedRunningTime="2025-12-09 15:02:24.77758826 +0000 UTC m=+242.985037347" watchObservedRunningTime="2025-12-09 15:02:43.285535971 +0000 UTC m=+261.492985059" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.285802 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.285832 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.285847 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2crjx"] Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.288813 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.289033 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.298662 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.298653955 podStartE2EDuration="19.298653955s" podCreationTimestamp="2025-12-09 15:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:02:43.29658134 +0000 UTC m=+261.504030428" watchObservedRunningTime="2025-12-09 15:02:43.298653955 +0000 UTC m=+261.506103043" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.419820 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.496102 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.581445 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.585154 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.614985 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.630613 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.718216 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.819252 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 09 15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.951475 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 09 
15:02:43 crc kubenswrapper[4583]: I1209 15:02:43.999519 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.192240 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.238480 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.355634 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.375680 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.435597 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.452479 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.633074 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.864776 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.914548 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 09 15:02:44 crc kubenswrapper[4583]: I1209 15:02:44.987511 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.003512 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.070276 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.184032 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.206418 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.242531 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.246429 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.280336 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.299111 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.312554 
4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.366238 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.502338 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.563160 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.663552 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.714517 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.748014 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.792638 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.805920 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.855807 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.903829 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.911768 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.919434 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.981258 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.983836 4583 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 09 15:02:45 crc kubenswrapper[4583]: I1209 15:02:45.998847 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.005628 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.027901 4583 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.038568 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.110711 4583 kubelet.go:2431] 
"SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.110922 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://7da33174343423eb47e7bd21b7c565bcb945001ceb2f859d7b4e479426e27767" gracePeriod=5 Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.245672 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.267426 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.308496 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.384622 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.468440 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.505798 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.624026 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.705685 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.763114 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.770963 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.776654 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.785391 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.791137 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.935680 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 09 15:02:46 crc kubenswrapper[4583]: I1209 15:02:46.947521 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.005760 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 09 15:02:47 crc 
kubenswrapper[4583]: I1209 15:02:47.017268 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.023280 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.214186 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.233153 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.329667 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.329813 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.384875 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.518585 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.563374 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.604756 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.659353 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.659609 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.684559 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.753847 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.793789 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.930660 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 09 15:02:47 crc kubenswrapper[4583]: I1209 15:02:47.943691 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.000973 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.137271 4583 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.251025 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.329971 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.405568 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.409322 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.431197 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.535087 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.610136 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.680029 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.687404 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.689127 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.702475 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.729278 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.830455 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 09 15:02:48 crc kubenswrapper[4583]: I1209 15:02:48.912317 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.005316 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.035009 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.045763 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.065295 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 
15:02:49.118790 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.179569 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.193711 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.255764 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.334116 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.400398 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.437507 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.447874 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.762546 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.786082 4583 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.794676 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.805823 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 09 15:02:49 crc kubenswrapper[4583]: I1209 15:02:49.903196 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.141783 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.176856 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.221619 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.283694 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.286244 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.295997 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.314559 4583 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.319177 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.319603 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.413200 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.431476 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.481669 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.507448 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.552363 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.592914 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.680866 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.700785 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.896609 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 09 15:02:50 crc kubenswrapper[4583]: I1209 15:02:50.996794 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.137257 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.159541 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.164796 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.473185 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.576207 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.631085 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.631135 4583 
generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="7da33174343423eb47e7bd21b7c565bcb945001ceb2f859d7b4e479426e27767" exitCode=137 Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.656812 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.656867 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.685715 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.720813 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760057 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760094 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760134 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760215 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760217 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760255 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760264 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760300 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760367 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760485 4583 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760507 4583 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760518 4583 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.760527 4583 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.765546 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.767486 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.861379 4583 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 09 15:02:51 crc kubenswrapper[4583]: I1209 15:02:51.934058 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.083498 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.134169 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.546552 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.635559 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.635638 4583 scope.go:117] "RemoveContainer" containerID="7da33174343423eb47e7bd21b7c565bcb945001ceb2f859d7b4e479426e27767" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.635684 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 09 15:02:52 crc kubenswrapper[4583]: I1209 15:02:52.877106 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.145640 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.217174 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.251021 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.313327 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.381617 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.592075 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.740439 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 09 15:02:53 crc kubenswrapper[4583]: I1209 15:02:53.920643 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 09 15:02:54 crc kubenswrapper[4583]: I1209 15:02:54.171091 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.046644 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.047197 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" podUID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" containerName="route-controller-manager" containerID="cri-o://412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197" gracePeriod=30 Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.049035 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.049204 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerName="controller-manager" containerID="cri-o://9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4" gracePeriod=30 Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.358448 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.361915 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483055 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert\") pod \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483102 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert\") pod \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483134 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config\") pod \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483151 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca\") pod \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483200 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles\") pod \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483646 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca" (OuterVolumeSpecName: "client-ca") pod "16821b35-4f2e-4733-a44c-3e3a8caaf98a" (UID: "16821b35-4f2e-4733-a44c-3e3a8caaf98a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483677 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "16821b35-4f2e-4733-a44c-3e3a8caaf98a" (UID: "16821b35-4f2e-4733-a44c-3e3a8caaf98a"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483699 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca\") pod \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483730 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config\") pod \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483786 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzzj2\" (UniqueName: \"kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2\") pod \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\" (UID: \"16821b35-4f2e-4733-a44c-3e3a8caaf98a\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483811 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhrpf\" (UniqueName: \"kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf\") pod \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\" (UID: \"1e08c725-bc3f-4816-bf4c-a6bdecded47e\") " Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.483949 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca" (OuterVolumeSpecName: "client-ca") pod "1e08c725-bc3f-4816-bf4c-a6bdecded47e" (UID: "1e08c725-bc3f-4816-bf4c-a6bdecded47e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484028 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config" (OuterVolumeSpecName: "config") pod "1e08c725-bc3f-4816-bf4c-a6bdecded47e" (UID: "1e08c725-bc3f-4816-bf4c-a6bdecded47e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484207 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config" (OuterVolumeSpecName: "config") pod "16821b35-4f2e-4733-a44c-3e3a8caaf98a" (UID: "16821b35-4f2e-4733-a44c-3e3a8caaf98a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484375 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484394 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484402 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e08c725-bc3f-4816-bf4c-a6bdecded47e-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484411 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.484421 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/16821b35-4f2e-4733-a44c-3e3a8caaf98a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.487526 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf" (OuterVolumeSpecName: "kube-api-access-vhrpf") pod "1e08c725-bc3f-4816-bf4c-a6bdecded47e" (UID: "1e08c725-bc3f-4816-bf4c-a6bdecded47e"). InnerVolumeSpecName "kube-api-access-vhrpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.487557 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "16821b35-4f2e-4733-a44c-3e3a8caaf98a" (UID: "16821b35-4f2e-4733-a44c-3e3a8caaf98a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.487568 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2" (OuterVolumeSpecName: "kube-api-access-xzzj2") pod "16821b35-4f2e-4733-a44c-3e3a8caaf98a" (UID: "16821b35-4f2e-4733-a44c-3e3a8caaf98a"). InnerVolumeSpecName "kube-api-access-xzzj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.488026 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1e08c725-bc3f-4816-bf4c-a6bdecded47e" (UID: "1e08c725-bc3f-4816-bf4c-a6bdecded47e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.585056 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16821b35-4f2e-4733-a44c-3e3a8caaf98a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.585089 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e08c725-bc3f-4816-bf4c-a6bdecded47e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.585098 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzzj2\" (UniqueName: \"kubernetes.io/projected/16821b35-4f2e-4733-a44c-3e3a8caaf98a-kube-api-access-xzzj2\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.585109 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhrpf\" (UniqueName: \"kubernetes.io/projected/1e08c725-bc3f-4816-bf4c-a6bdecded47e-kube-api-access-vhrpf\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.727839 4583 generic.go:334] "Generic (PLEG): container finished" podID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" containerID="412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197" exitCode=0 Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.727902 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" event={"ID":"1e08c725-bc3f-4816-bf4c-a6bdecded47e","Type":"ContainerDied","Data":"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197"} Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.727923 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.727936 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c" event={"ID":"1e08c725-bc3f-4816-bf4c-a6bdecded47e","Type":"ContainerDied","Data":"c78972a4bcb0c6c885f3b61cea6593b0f804aea925582f95126e64391373fbfa"} Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.727952 4583 scope.go:117] "RemoveContainer" containerID="412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.729365 4583 generic.go:334] "Generic (PLEG): container finished" podID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerID="9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4" exitCode=0 Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.729388 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" event={"ID":"16821b35-4f2e-4733-a44c-3e3a8caaf98a","Type":"ContainerDied","Data":"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4"} Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.729404 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" event={"ID":"16821b35-4f2e-4733-a44c-3e3a8caaf98a","Type":"ContainerDied","Data":"26fd81b1f6bdf80a69806d25d0c6be9b66dd2288d7b6378bdddd6b9c5e5c069d"} Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.729413 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tvq7j" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.740548 4583 scope.go:117] "RemoveContainer" containerID="412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197" Dec 09 15:03:16 crc kubenswrapper[4583]: E1209 15:03:16.741045 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197\": container with ID starting with 412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197 not found: ID does not exist" containerID="412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.741071 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197"} err="failed to get container status \"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197\": rpc error: code = NotFound desc = could not find container \"412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197\": container with ID starting with 412f2fe12687e4918816097e638d2550a45e437b71bd569d50ab6ca087cd8197 not found: ID does not exist" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.741087 4583 scope.go:117] "RemoveContainer" containerID="9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.749269 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.750157 4583 scope.go:117] "RemoveContainer" containerID="9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4" Dec 09 15:03:16 crc kubenswrapper[4583]: E1209 15:03:16.750777 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4\": container with ID starting with 9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4 not found: ID does not exist" containerID="9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.750803 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4"} err="failed to get container status \"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4\": rpc error: code = NotFound desc = could not find container \"9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4\": container with ID starting with 9f0b0ee02c330d6df4064df2833750e104ec27d6a2799a2ce94f022fbb046fb4 not found: ID does not exist" Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.751316 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vqh9c"] Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.754774 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:03:16 crc kubenswrapper[4583]: I1209 15:03:16.757711 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tvq7j"] Dec 09 15:03:17 crc 
kubenswrapper[4583]: I1209 15:03:17.527204 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:17 crc kubenswrapper[4583]: E1209 15:03:17.527379 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527390 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 09 15:03:17 crc kubenswrapper[4583]: E1209 15:03:17.527403 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" containerName="installer" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527409 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" containerName="installer" Dec 09 15:03:17 crc kubenswrapper[4583]: E1209 15:03:17.527417 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerName="controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527422 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerName="controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: E1209 15:03:17.527434 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" containerName="route-controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527439 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" containerName="route-controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527520 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" containerName="route-controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527539 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" containerName="controller-manager" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527546 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527553 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8d7a42-b256-4e44-9763-9e8d22086fac" containerName="installer" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.527872 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529313 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529370 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529406 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529634 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529790 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.529907 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.530135 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.531846 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.531873 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.531975 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.532033 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.532207 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.532692 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.532964 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.535675 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.536413 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.539582 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.593836 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.593868 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.593889 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.593919 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.593937 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.594041 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.594081 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh2zt\" (UniqueName: \"kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.594153 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.594174 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7qcz\" (UniqueName: 
\"kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694621 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694651 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7qcz\" (UniqueName: \"kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694685 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694701 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694716 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694761 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694781 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694801 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert\") pod \"controller-manager-76464bf686-7vq48\" (UID: 
\"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.694816 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh2zt\" (UniqueName: \"kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.695681 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.695888 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.696381 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.696390 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.696580 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.698232 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.698505 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.707161 4583 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qh2zt\" (UniqueName: \"kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt\") pod \"route-controller-manager-5d57cfbc8-jftcg\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.708267 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7qcz\" (UniqueName: \"kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz\") pod \"controller-manager-76464bf686-7vq48\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.839629 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:17 crc kubenswrapper[4583]: I1209 15:03:17.844854 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.083109 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16821b35-4f2e-4733-a44c-3e3a8caaf98a" path="/var/lib/kubelet/pods/16821b35-4f2e-4733-a44c-3e3a8caaf98a/volumes" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.083764 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e08c725-bc3f-4816-bf4c-a6bdecded47e" path="/var/lib/kubelet/pods/1e08c725-bc3f-4816-bf4c-a6bdecded47e/volumes" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.169210 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.201594 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:18 crc kubenswrapper[4583]: W1209 15:03:18.206309 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeea6a02c_85c4_4bd3_85dc_4261c756420d.slice/crio-5dff3e4d384304370782ab7212c721f80b3864412317c0a8c8c7d8832b58a37b WatchSource:0}: Error finding container 5dff3e4d384304370782ab7212c721f80b3864412317c0a8c8c7d8832b58a37b: Status 404 returned error can't find the container with id 5dff3e4d384304370782ab7212c721f80b3864412317c0a8c8c7d8832b58a37b Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.739142 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" event={"ID":"66d2f10c-132d-4a39-8bb4-956beb121592","Type":"ContainerStarted","Data":"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2"} Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.739377 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.739389 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" event={"ID":"66d2f10c-132d-4a39-8bb4-956beb121592","Type":"ContainerStarted","Data":"31c5bbea1047e126e02b232cd8f7f82d70d12564b1d6d131e29039765f2971c2"} 
Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.740782 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" event={"ID":"eea6a02c-85c4-4bd3-85dc-4261c756420d","Type":"ContainerStarted","Data":"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65"} Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.740808 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" event={"ID":"eea6a02c-85c4-4bd3-85dc-4261c756420d","Type":"ContainerStarted","Data":"5dff3e4d384304370782ab7212c721f80b3864412317c0a8c8c7d8832b58a37b"} Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.741029 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.744506 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.753512 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" podStartSLOduration=2.753500635 podStartE2EDuration="2.753500635s" podCreationTimestamp="2025-12-09 15:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:18.752501749 +0000 UTC m=+296.959950838" watchObservedRunningTime="2025-12-09 15:03:18.753500635 +0000 UTC m=+296.960949724" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.765246 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" podStartSLOduration=2.765232567 podStartE2EDuration="2.765232567s" podCreationTimestamp="2025-12-09 15:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:18.762588004 +0000 UTC m=+296.970037092" watchObservedRunningTime="2025-12-09 15:03:18.765232567 +0000 UTC m=+296.972681655" Dec 09 15:03:18 crc kubenswrapper[4583]: I1209 15:03:18.778162 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:19 crc kubenswrapper[4583]: I1209 15:03:19.901357 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7llk8"] Dec 09 15:03:19 crc kubenswrapper[4583]: I1209 15:03:19.902170 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:19 crc kubenswrapper[4583]: I1209 15:03:19.911545 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7llk8"] Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019068 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-trusted-ca\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019142 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-tls\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019199 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019231 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-certificates\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019251 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckgwz\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-kube-api-access-ckgwz\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019270 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a488660e-0ad5-4727-92eb-494b0dc20f78-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019286 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-bound-sa-token\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.019306 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/a488660e-0ad5-4727-92eb-494b0dc20f78-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.037661 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120324 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-trusted-ca\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120381 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-tls\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120436 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-certificates\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120456 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckgwz\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-kube-api-access-ckgwz\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120472 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a488660e-0ad5-4727-92eb-494b0dc20f78-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120486 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-bound-sa-token\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.120505 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a488660e-0ad5-4727-92eb-494b0dc20f78-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.121137 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a488660e-0ad5-4727-92eb-494b0dc20f78-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.121829 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-certificates\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.122658 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a488660e-0ad5-4727-92eb-494b0dc20f78-trusted-ca\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.125312 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-registry-tls\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.126010 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a488660e-0ad5-4727-92eb-494b0dc20f78-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.133387 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-bound-sa-token\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.133605 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckgwz\" (UniqueName: \"kubernetes.io/projected/a488660e-0ad5-4727-92eb-494b0dc20f78-kube-api-access-ckgwz\") pod \"image-registry-66df7c8f76-7llk8\" (UID: \"a488660e-0ad5-4727-92eb-494b0dc20f78\") " pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.213152 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.341620 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.437192 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.556697 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7llk8"] Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.750250 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" event={"ID":"a488660e-0ad5-4727-92eb-494b0dc20f78","Type":"ContainerStarted","Data":"ef15c7c939d1d53a7d0c2e9909b8535ac367a409a408707c2d25716d0f8fedf0"} Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.750300 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" event={"ID":"a488660e-0ad5-4727-92eb-494b0dc20f78","Type":"ContainerStarted","Data":"253a92cfb80c93d4cf718696419a9ea809eaae4d2700a3aac6c89d4e449df896"} Dec 09 15:03:20 crc kubenswrapper[4583]: I1209 15:03:20.773314 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" podStartSLOduration=1.773301174 podStartE2EDuration="1.773301174s" podCreationTimestamp="2025-12-09 15:03:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:20.772084025 +0000 UTC m=+298.979533114" watchObservedRunningTime="2025-12-09 15:03:20.773301174 +0000 UTC m=+298.980750262" Dec 09 15:03:21 crc kubenswrapper[4583]: I1209 15:03:21.754226 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" podUID="eea6a02c-85c4-4bd3-85dc-4261c756420d" containerName="controller-manager" containerID="cri-o://2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65" gracePeriod=30 Dec 09 15:03:21 crc kubenswrapper[4583]: I1209 15:03:21.754549 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" podUID="66d2f10c-132d-4a39-8bb4-956beb121592" containerName="route-controller-manager" containerID="cri-o://d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2" gracePeriod=30 Dec 09 15:03:21 crc kubenswrapper[4583]: I1209 15:03:21.754491 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.094974 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.099371 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.116680 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:22 crc kubenswrapper[4583]: E1209 15:03:22.116870 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea6a02c-85c4-4bd3-85dc-4261c756420d" containerName="controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.116881 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea6a02c-85c4-4bd3-85dc-4261c756420d" containerName="controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: E1209 15:03:22.116897 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66d2f10c-132d-4a39-8bb4-956beb121592" containerName="route-controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.116903 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="66d2f10c-132d-4a39-8bb4-956beb121592" containerName="route-controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.116976 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea6a02c-85c4-4bd3-85dc-4261c756420d" containerName="controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.116987 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="66d2f10c-132d-4a39-8bb4-956beb121592" containerName="route-controller-manager" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.117267 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.142230 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249369 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert\") pod \"eea6a02c-85c4-4bd3-85dc-4261c756420d\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249408 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qh2zt\" (UniqueName: \"kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt\") pod \"66d2f10c-132d-4a39-8bb4-956beb121592\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249451 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config\") pod \"66d2f10c-132d-4a39-8bb4-956beb121592\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249482 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7qcz\" (UniqueName: \"kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz\") pod \"eea6a02c-85c4-4bd3-85dc-4261c756420d\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249552 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca\") pod \"66d2f10c-132d-4a39-8bb4-956beb121592\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249577 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles\") pod \"eea6a02c-85c4-4bd3-85dc-4261c756420d\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.249608 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert\") pod \"66d2f10c-132d-4a39-8bb4-956beb121592\" (UID: \"66d2f10c-132d-4a39-8bb4-956beb121592\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250180 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca" (OuterVolumeSpecName: "client-ca") pod "66d2f10c-132d-4a39-8bb4-956beb121592" (UID: "66d2f10c-132d-4a39-8bb4-956beb121592"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250244 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca\") pod \"eea6a02c-85c4-4bd3-85dc-4261c756420d\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250250 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "eea6a02c-85c4-4bd3-85dc-4261c756420d" (UID: "eea6a02c-85c4-4bd3-85dc-4261c756420d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250264 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config\") pod \"eea6a02c-85c4-4bd3-85dc-4261c756420d\" (UID: \"eea6a02c-85c4-4bd3-85dc-4261c756420d\") " Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250527 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250535 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca" (OuterVolumeSpecName: "client-ca") pod "eea6a02c-85c4-4bd3-85dc-4261c756420d" (UID: "eea6a02c-85c4-4bd3-85dc-4261c756420d"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250626 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250648 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250675 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250767 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config" (OuterVolumeSpecName: "config") pod "eea6a02c-85c4-4bd3-85dc-4261c756420d" (UID: "eea6a02c-85c4-4bd3-85dc-4261c756420d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250848 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn4bm\" (UniqueName: \"kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250895 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250906 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250916 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.250924 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eea6a02c-85c4-4bd3-85dc-4261c756420d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.251182 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config" (OuterVolumeSpecName: "config") pod "66d2f10c-132d-4a39-8bb4-956beb121592" (UID: 
"66d2f10c-132d-4a39-8bb4-956beb121592"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.254000 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "66d2f10c-132d-4a39-8bb4-956beb121592" (UID: "66d2f10c-132d-4a39-8bb4-956beb121592"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.254165 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz" (OuterVolumeSpecName: "kube-api-access-c7qcz") pod "eea6a02c-85c4-4bd3-85dc-4261c756420d" (UID: "eea6a02c-85c4-4bd3-85dc-4261c756420d"). InnerVolumeSpecName "kube-api-access-c7qcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.254584 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt" (OuterVolumeSpecName: "kube-api-access-qh2zt") pod "66d2f10c-132d-4a39-8bb4-956beb121592" (UID: "66d2f10c-132d-4a39-8bb4-956beb121592"). InnerVolumeSpecName "kube-api-access-qh2zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.254825 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "eea6a02c-85c4-4bd3-85dc-4261c756420d" (UID: "eea6a02c-85c4-4bd3-85dc-4261c756420d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354265 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354322 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354341 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354362 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354460 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn4bm\" (UniqueName: \"kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354498 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66d2f10c-132d-4a39-8bb4-956beb121592-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354509 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eea6a02c-85c4-4bd3-85dc-4261c756420d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354518 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qh2zt\" (UniqueName: \"kubernetes.io/projected/66d2f10c-132d-4a39-8bb4-956beb121592-kube-api-access-qh2zt\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354529 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66d2f10c-132d-4a39-8bb4-956beb121592-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.354537 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7qcz\" (UniqueName: \"kubernetes.io/projected/eea6a02c-85c4-4bd3-85dc-4261c756420d-kube-api-access-c7qcz\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.355262 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.355275 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.355508 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.357766 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.367323 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn4bm\" (UniqueName: \"kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm\") pod \"controller-manager-86b88ccc-whr4h\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.427344 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.750786 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:22 crc kubenswrapper[4583]: W1209 15:03:22.754410 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e160c29_ed97_4f96_8d40_c24465458d7a.slice/crio-6305b23b054c998f13092be0adc2decf8e598efb8adaf9ce8ef57841df3e87a7 WatchSource:0}: Error finding container 6305b23b054c998f13092be0adc2decf8e598efb8adaf9ce8ef57841df3e87a7: Status 404 returned error can't find the container with id 6305b23b054c998f13092be0adc2decf8e598efb8adaf9ce8ef57841df3e87a7 Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.759561 4583 generic.go:334] "Generic (PLEG): container finished" podID="66d2f10c-132d-4a39-8bb4-956beb121592" containerID="d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2" exitCode=0 Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.759628 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" event={"ID":"66d2f10c-132d-4a39-8bb4-956beb121592","Type":"ContainerDied","Data":"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2"} Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.759655 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" event={"ID":"66d2f10c-132d-4a39-8bb4-956beb121592","Type":"ContainerDied","Data":"31c5bbea1047e126e02b232cd8f7f82d70d12564b1d6d131e29039765f2971c2"} Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.759671 4583 scope.go:117] "RemoveContainer" containerID="d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.760162 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.760700 4583 generic.go:334] "Generic (PLEG): container finished" podID="eea6a02c-85c4-4bd3-85dc-4261c756420d" containerID="2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65" exitCode=0 Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.760737 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" event={"ID":"eea6a02c-85c4-4bd3-85dc-4261c756420d","Type":"ContainerDied","Data":"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65"} Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.760776 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" event={"ID":"eea6a02c-85c4-4bd3-85dc-4261c756420d","Type":"ContainerDied","Data":"5dff3e4d384304370782ab7212c721f80b3864412317c0a8c8c7d8832b58a37b"} Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.760784 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76464bf686-7vq48" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.770411 4583 scope.go:117] "RemoveContainer" containerID="d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2" Dec 09 15:03:22 crc kubenswrapper[4583]: E1209 15:03:22.770782 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2\": container with ID starting with d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2 not found: ID does not exist" containerID="d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.770816 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2"} err="failed to get container status \"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2\": rpc error: code = NotFound desc = could not find container \"d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2\": container with ID starting with d42333886c413471447ed9f3417a0495624a8fb4020b31cb151f0755a4b9c2e2 not found: ID does not exist" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.770839 4583 scope.go:117] "RemoveContainer" containerID="2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.783131 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.784240 4583 scope.go:117] "RemoveContainer" containerID="2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65" Dec 09 15:03:22 crc kubenswrapper[4583]: E1209 15:03:22.784691 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65\": container with ID starting with 2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65 not found: ID does not exist" containerID="2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.784719 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65"} err="failed to get container status \"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65\": rpc error: code = NotFound desc = could not find container \"2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65\": container with ID starting with 2523f79523d47e0a95f2dce92e894321d1b32a28a88ccd34814d84c87d1adf65 not found: ID does not exist" Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.785833 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-76464bf686-7vq48"] Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.791072 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:22 crc kubenswrapper[4583]: I1209 15:03:22.793442 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d57cfbc8-jftcg"] Dec 09 15:03:23 crc 
kubenswrapper[4583]: I1209 15:03:23.638015 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.665086 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.666002 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.668188 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.668291 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.668484 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.668532 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.669318 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.671157 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.675215 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.765661 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" event={"ID":"8e160c29-ed97-4f96-8d40-c24465458d7a","Type":"ContainerStarted","Data":"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379"} Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.765699 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" event={"ID":"8e160c29-ed97-4f96-8d40-c24465458d7a","Type":"ContainerStarted","Data":"6305b23b054c998f13092be0adc2decf8e598efb8adaf9ce8ef57841df3e87a7"} Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.765994 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.767883 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.767937 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca\") pod 
\"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.767991 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjgw7\" (UniqueName: \"kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.768021 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.770126 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.786734 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" podStartSLOduration=3.78672555 podStartE2EDuration="3.78672555s" podCreationTimestamp="2025-12-09 15:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:23.784247852 +0000 UTC m=+301.991696940" watchObservedRunningTime="2025-12-09 15:03:23.78672555 +0000 UTC m=+301.994174628" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.869281 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.869329 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.869369 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjgw7\" (UniqueName: \"kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.869399 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " 
pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.870455 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.870686 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.873901 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.883204 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjgw7\" (UniqueName: \"kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7\") pod \"route-controller-manager-647f6fbcc5-6fxvn\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:23 crc kubenswrapper[4583]: I1209 15:03:23.979493 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.083812 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66d2f10c-132d-4a39-8bb4-956beb121592" path="/var/lib/kubelet/pods/66d2f10c-132d-4a39-8bb4-956beb121592/volumes" Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.084444 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea6a02c-85c4-4bd3-85dc-4261c756420d" path="/var/lib/kubelet/pods/eea6a02c-85c4-4bd3-85dc-4261c756420d/volumes" Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.316193 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.772510 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" podUID="8e160c29-ed97-4f96-8d40-c24465458d7a" containerName="controller-manager" containerID="cri-o://af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379" gracePeriod=30 Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.776709 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" event={"ID":"10465bd0-c38b-4d63-a680-6561b7c5c8fd","Type":"ContainerStarted","Data":"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e"} Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.776758 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.776768 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" event={"ID":"10465bd0-c38b-4d63-a680-6561b7c5c8fd","Type":"ContainerStarted","Data":"0e0213aef4ea0620bf306cc0e18cf0d6b592b24231360f1824aae8cd515ccee7"} Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.788544 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" podStartSLOduration=1.788530836 podStartE2EDuration="1.788530836s" podCreationTimestamp="2025-12-09 15:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:24.785957296 +0000 UTC m=+302.993406384" watchObservedRunningTime="2025-12-09 15:03:24.788530836 +0000 UTC m=+302.995979923" Dec 09 15:03:24 crc kubenswrapper[4583]: I1209 15:03:24.914688 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.114452 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.132081 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:25 crc kubenswrapper[4583]: E1209 15:03:25.132250 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e160c29-ed97-4f96-8d40-c24465458d7a" containerName="controller-manager" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.132265 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e160c29-ed97-4f96-8d40-c24465458d7a" containerName="controller-manager" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.132372 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e160c29-ed97-4f96-8d40-c24465458d7a" containerName="controller-manager" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.132646 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.141652 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.287634 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles\") pod \"8e160c29-ed97-4f96-8d40-c24465458d7a\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.287670 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca\") pod \"8e160c29-ed97-4f96-8d40-c24465458d7a\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.287704 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn4bm\" (UniqueName: \"kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm\") pod \"8e160c29-ed97-4f96-8d40-c24465458d7a\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.287722 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert\") pod \"8e160c29-ed97-4f96-8d40-c24465458d7a\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.287739 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config\") pod \"8e160c29-ed97-4f96-8d40-c24465458d7a\" (UID: \"8e160c29-ed97-4f96-8d40-c24465458d7a\") " Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288256 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca" (OuterVolumeSpecName: "client-ca") pod "8e160c29-ed97-4f96-8d40-c24465458d7a" (UID: "8e160c29-ed97-4f96-8d40-c24465458d7a"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288265 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8e160c29-ed97-4f96-8d40-c24465458d7a" (UID: "8e160c29-ed97-4f96-8d40-c24465458d7a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288426 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config" (OuterVolumeSpecName: "config") pod "8e160c29-ed97-4f96-8d40-c24465458d7a" (UID: "8e160c29-ed97-4f96-8d40-c24465458d7a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288547 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288577 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288598 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz6ks\" (UniqueName: \"kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288689 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288798 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288898 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288914 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.288922 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e160c29-ed97-4f96-8d40-c24465458d7a-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.291685 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8e160c29-ed97-4f96-8d40-c24465458d7a" (UID: "8e160c29-ed97-4f96-8d40-c24465458d7a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.291764 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm" (OuterVolumeSpecName: "kube-api-access-fn4bm") pod "8e160c29-ed97-4f96-8d40-c24465458d7a" (UID: "8e160c29-ed97-4f96-8d40-c24465458d7a"). InnerVolumeSpecName "kube-api-access-fn4bm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.389898 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.389960 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.389980 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.389996 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz6ks\" (UniqueName: \"kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.390024 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.390076 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn4bm\" (UniqueName: 
\"kubernetes.io/projected/8e160c29-ed97-4f96-8d40-c24465458d7a-kube-api-access-fn4bm\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.390086 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e160c29-ed97-4f96-8d40-c24465458d7a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.390799 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.391046 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.391345 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.392818 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.403149 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz6ks\" (UniqueName: \"kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks\") pod \"controller-manager-846df49455-fdpns\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.445383 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.777895 4583 generic.go:334] "Generic (PLEG): container finished" podID="8e160c29-ed97-4f96-8d40-c24465458d7a" containerID="af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379" exitCode=0 Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.777941 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.777958 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" event={"ID":"8e160c29-ed97-4f96-8d40-c24465458d7a","Type":"ContainerDied","Data":"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379"} Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.778383 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-whr4h" event={"ID":"8e160c29-ed97-4f96-8d40-c24465458d7a","Type":"ContainerDied","Data":"6305b23b054c998f13092be0adc2decf8e598efb8adaf9ce8ef57841df3e87a7"} Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.778406 4583 scope.go:117] "RemoveContainer" containerID="af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.793948 4583 scope.go:117] "RemoveContainer" containerID="af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.798725 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:25 crc kubenswrapper[4583]: E1209 15:03:25.799340 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379\": container with ID starting with af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379 not found: ID does not exist" containerID="af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.799365 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379"} err="failed to get container status \"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379\": rpc error: code = NotFound desc = could not find container \"af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379\": container with ID starting with af7bdf4e26702dcb082f2440204b157f08dd08469fb669607e2c1a3bc0307379 not found: ID does not exist" Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.802763 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:25 crc kubenswrapper[4583]: W1209 15:03:25.804393 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7d13876_6424_4b79_b3c1_7c8cec7b233c.slice/crio-4260280f9033c8c1c49df1e0c6925bcc14b3c37b0eb4a17b991b73dfef19eb90 WatchSource:0}: Error finding container 4260280f9033c8c1c49df1e0c6925bcc14b3c37b0eb4a17b991b73dfef19eb90: Status 404 returned error can't find the container with id 4260280f9033c8c1c49df1e0c6925bcc14b3c37b0eb4a17b991b73dfef19eb90 Dec 09 15:03:25 crc kubenswrapper[4583]: I1209 15:03:25.805504 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-whr4h"] Dec 09 15:03:26 crc kubenswrapper[4583]: I1209 15:03:26.083247 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e160c29-ed97-4f96-8d40-c24465458d7a" path="/var/lib/kubelet/pods/8e160c29-ed97-4f96-8d40-c24465458d7a/volumes" Dec 09 15:03:26 
crc kubenswrapper[4583]: I1209 15:03:26.785270 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" event={"ID":"f7d13876-6424-4b79-b3c1-7c8cec7b233c","Type":"ContainerStarted","Data":"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d"} Dec 09 15:03:26 crc kubenswrapper[4583]: I1209 15:03:26.785322 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" event={"ID":"f7d13876-6424-4b79-b3c1-7c8cec7b233c","Type":"ContainerStarted","Data":"4260280f9033c8c1c49df1e0c6925bcc14b3c37b0eb4a17b991b73dfef19eb90"} Dec 09 15:03:26 crc kubenswrapper[4583]: I1209 15:03:26.785873 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:26 crc kubenswrapper[4583]: I1209 15:03:26.789526 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:26 crc kubenswrapper[4583]: I1209 15:03:26.799176 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" podStartSLOduration=3.799161239 podStartE2EDuration="3.799161239s" podCreationTimestamp="2025-12-09 15:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:26.795897227 +0000 UTC m=+305.003346315" watchObservedRunningTime="2025-12-09 15:03:26.799161239 +0000 UTC m=+305.006610327" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.061354 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.062024 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" podUID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" containerName="controller-manager" containerID="cri-o://8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d" gracePeriod=30 Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.075257 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.075523 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" podUID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" containerName="route-controller-manager" containerID="cri-o://439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e" gracePeriod=30 Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.481648 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.486134 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616692 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca\") pod \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616738 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz6ks\" (UniqueName: \"kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks\") pod \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616808 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles\") pod \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616830 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config\") pod \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616895 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca\") pod \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616922 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjgw7\" (UniqueName: \"kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7\") pod \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616956 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert\") pod \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.616991 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert\") pod \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\" (UID: \"10465bd0-c38b-4d63-a680-6561b7c5c8fd\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617007 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config\") pod \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\" (UID: \"f7d13876-6424-4b79-b3c1-7c8cec7b233c\") " Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617448 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca" (OuterVolumeSpecName: "client-ca") pod "10465bd0-c38b-4d63-a680-6561b7c5c8fd" 
(UID: "10465bd0-c38b-4d63-a680-6561b7c5c8fd"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617459 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca" (OuterVolumeSpecName: "client-ca") pod "f7d13876-6424-4b79-b3c1-7c8cec7b233c" (UID: "f7d13876-6424-4b79-b3c1-7c8cec7b233c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617472 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config" (OuterVolumeSpecName: "config") pod "10465bd0-c38b-4d63-a680-6561b7c5c8fd" (UID: "10465bd0-c38b-4d63-a680-6561b7c5c8fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617585 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config" (OuterVolumeSpecName: "config") pod "f7d13876-6424-4b79-b3c1-7c8cec7b233c" (UID: "f7d13876-6424-4b79-b3c1-7c8cec7b233c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.617914 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f7d13876-6424-4b79-b3c1-7c8cec7b233c" (UID: "f7d13876-6424-4b79-b3c1-7c8cec7b233c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.621316 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "10465bd0-c38b-4d63-a680-6561b7c5c8fd" (UID: "10465bd0-c38b-4d63-a680-6561b7c5c8fd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.621393 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7" (OuterVolumeSpecName: "kube-api-access-fjgw7") pod "10465bd0-c38b-4d63-a680-6561b7c5c8fd" (UID: "10465bd0-c38b-4d63-a680-6561b7c5c8fd"). InnerVolumeSpecName "kube-api-access-fjgw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.627109 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f7d13876-6424-4b79-b3c1-7c8cec7b233c" (UID: "f7d13876-6424-4b79-b3c1-7c8cec7b233c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.628867 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks" (OuterVolumeSpecName: "kube-api-access-bz6ks") pod "f7d13876-6424-4b79-b3c1-7c8cec7b233c" (UID: "f7d13876-6424-4b79-b3c1-7c8cec7b233c"). 
InnerVolumeSpecName "kube-api-access-bz6ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718136 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718168 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz6ks\" (UniqueName: \"kubernetes.io/projected/f7d13876-6424-4b79-b3c1-7c8cec7b233c-kube-api-access-bz6ks\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718189 4583 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718198 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718206 4583 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/10465bd0-c38b-4d63-a680-6561b7c5c8fd-client-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718217 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjgw7\" (UniqueName: \"kubernetes.io/projected/10465bd0-c38b-4d63-a680-6561b7c5c8fd-kube-api-access-fjgw7\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718224 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7d13876-6424-4b79-b3c1-7c8cec7b233c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718231 4583 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10465bd0-c38b-4d63-a680-6561b7c5c8fd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.718238 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7d13876-6424-4b79-b3c1-7c8cec7b233c-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.830803 4583 generic.go:334] "Generic (PLEG): container finished" podID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" containerID="439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e" exitCode=0 Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.830862 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" event={"ID":"10465bd0-c38b-4d63-a680-6561b7c5c8fd","Type":"ContainerDied","Data":"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e"} Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.830888 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" event={"ID":"10465bd0-c38b-4d63-a680-6561b7c5c8fd","Type":"ContainerDied","Data":"0e0213aef4ea0620bf306cc0e18cf0d6b592b24231360f1824aae8cd515ccee7"} Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.830903 4583 scope.go:117] "RemoveContainer" 
containerID="439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.830984 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.833112 4583 generic.go:334] "Generic (PLEG): container finished" podID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" containerID="8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d" exitCode=0 Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.833153 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" event={"ID":"f7d13876-6424-4b79-b3c1-7c8cec7b233c","Type":"ContainerDied","Data":"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d"} Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.833177 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" event={"ID":"f7d13876-6424-4b79-b3c1-7c8cec7b233c","Type":"ContainerDied","Data":"4260280f9033c8c1c49df1e0c6925bcc14b3c37b0eb4a17b991b73dfef19eb90"} Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.833219 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-846df49455-fdpns" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.853813 4583 scope.go:117] "RemoveContainer" containerID="439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e" Dec 09 15:03:36 crc kubenswrapper[4583]: E1209 15:03:36.854121 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e\": container with ID starting with 439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e not found: ID does not exist" containerID="439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.854151 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e"} err="failed to get container status \"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e\": rpc error: code = NotFound desc = could not find container \"439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e\": container with ID starting with 439cc290c3048df6dc1a1f7f136f6270ac518fbc1a772e10a600ec285a132a4e not found: ID does not exist" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.854171 4583 scope.go:117] "RemoveContainer" containerID="8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.857619 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.860234 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-647f6fbcc5-6fxvn"] Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.870972 4583 scope.go:117] "RemoveContainer" containerID="8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.871418 4583 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:36 crc kubenswrapper[4583]: E1209 15:03:36.872013 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d\": container with ID starting with 8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d not found: ID does not exist" containerID="8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.872051 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d"} err="failed to get container status \"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d\": rpc error: code = NotFound desc = could not find container \"8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d\": container with ID starting with 8a4a1dbbd5d4b889dca71e226147f0bc26e143852e6cd31d73caf22f1f5ba27d not found: ID does not exist" Dec 09 15:03:36 crc kubenswrapper[4583]: I1209 15:03:36.873732 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-846df49455-fdpns"] Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.546080 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-44pwz"] Dec 09 15:03:37 crc kubenswrapper[4583]: E1209 15:03:37.546966 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" containerName="controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.546988 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" containerName="controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: E1209 15:03:37.547000 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" containerName="route-controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.547006 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" containerName="route-controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.549062 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" containerName="controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.549096 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" containerName="route-controller-manager" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.549632 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps"] Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.550955 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.553275 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.555354 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.556146 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.556548 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.558730 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-44pwz"] Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.559564 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.559738 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.559892 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.560019 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.560338 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.561589 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.561819 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps"] Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.562216 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.562326 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.562445 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.563411 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732097 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732158 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-client-ca\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732189 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-config\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732218 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-config\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732250 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd179244-45a1-4341-aa59-90bada5df310-serving-cert\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732267 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-client-ca\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732285 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d006ca0-981b-4ce8-b5a9-20e29a09d665-serving-cert\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732363 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcfc\" (UniqueName: \"kubernetes.io/projected/2d006ca0-981b-4ce8-b5a9-20e29a09d665-kube-api-access-gdcfc\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.732450 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mf28\" (UniqueName: \"kubernetes.io/projected/dd179244-45a1-4341-aa59-90bada5df310-kube-api-access-4mf28\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834281 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-client-ca\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834317 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd179244-45a1-4341-aa59-90bada5df310-serving-cert\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834334 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d006ca0-981b-4ce8-b5a9-20e29a09d665-serving-cert\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834358 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdcfc\" (UniqueName: \"kubernetes.io/projected/2d006ca0-981b-4ce8-b5a9-20e29a09d665-kube-api-access-gdcfc\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834385 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mf28\" (UniqueName: \"kubernetes.io/projected/dd179244-45a1-4341-aa59-90bada5df310-kube-api-access-4mf28\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834863 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834894 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-client-ca\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834935 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-config\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.834956 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-config\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: 
\"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.836081 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-config\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.836095 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-config\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.836525 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2d006ca0-981b-4ce8-b5a9-20e29a09d665-client-ca\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.836798 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-client-ca\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.838654 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd179244-45a1-4341-aa59-90bada5df310-serving-cert\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.839333 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd179244-45a1-4341-aa59-90bada5df310-proxy-ca-bundles\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.847355 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mf28\" (UniqueName: \"kubernetes.io/projected/dd179244-45a1-4341-aa59-90bada5df310-kube-api-access-4mf28\") pod \"controller-manager-86b88ccc-44pwz\" (UID: \"dd179244-45a1-4341-aa59-90bada5df310\") " pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.847889 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdcfc\" (UniqueName: \"kubernetes.io/projected/2d006ca0-981b-4ce8-b5a9-20e29a09d665-kube-api-access-gdcfc\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.849647 4583 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d006ca0-981b-4ce8-b5a9-20e29a09d665-serving-cert\") pod \"route-controller-manager-54cc898447-xfpps\" (UID: \"2d006ca0-981b-4ce8-b5a9-20e29a09d665\") " pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.873217 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:37 crc kubenswrapper[4583]: I1209 15:03:37.882140 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.083914 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10465bd0-c38b-4d63-a680-6561b7c5c8fd" path="/var/lib/kubelet/pods/10465bd0-c38b-4d63-a680-6561b7c5c8fd/volumes" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.085089 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7d13876-6424-4b79-b3c1-7c8cec7b233c" path="/var/lib/kubelet/pods/f7d13876-6424-4b79-b3c1-7c8cec7b233c/volumes" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.221769 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86b88ccc-44pwz"] Dec 09 15:03:38 crc kubenswrapper[4583]: W1209 15:03:38.224951 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd179244_45a1_4341_aa59_90bada5df310.slice/crio-094600c19864331c6fd2b36bd84f35856db4c7ead2e8c229d2a40ef0124df538 WatchSource:0}: Error finding container 094600c19864331c6fd2b36bd84f35856db4c7ead2e8c229d2a40ef0124df538: Status 404 returned error can't find the container with id 094600c19864331c6fd2b36bd84f35856db4c7ead2e8c229d2a40ef0124df538 Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.251467 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps"] Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.846681 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" event={"ID":"dd179244-45a1-4341-aa59-90bada5df310","Type":"ContainerStarted","Data":"f13a6fbec299977e2e5afcaed4c2a803863a969589e940c017f396ab0b1dce42"} Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.846848 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" event={"ID":"dd179244-45a1-4341-aa59-90bada5df310","Type":"ContainerStarted","Data":"094600c19864331c6fd2b36bd84f35856db4c7ead2e8c229d2a40ef0124df538"} Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.846864 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.848020 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" event={"ID":"2d006ca0-981b-4ce8-b5a9-20e29a09d665","Type":"ContainerStarted","Data":"c7f3db58d229e22c3190fa55636dc73e3528dd8aa5094ef39a67a09672ae620b"} Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.848472 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" event={"ID":"2d006ca0-981b-4ce8-b5a9-20e29a09d665","Type":"ContainerStarted","Data":"099e8401529cb501e674e0d08e4c3a7b2d9344a498960fac5d122d6767b7564d"} Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.848498 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.850849 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.854298 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.859524 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86b88ccc-44pwz" podStartSLOduration=2.85951477 podStartE2EDuration="2.85951477s" podCreationTimestamp="2025-12-09 15:03:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:38.858279267 +0000 UTC m=+317.065728356" watchObservedRunningTime="2025-12-09 15:03:38.85951477 +0000 UTC m=+317.066963858" Dec 09 15:03:38 crc kubenswrapper[4583]: I1209 15:03:38.887847 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-54cc898447-xfpps" podStartSLOduration=2.887831785 podStartE2EDuration="2.887831785s" podCreationTimestamp="2025-12-09 15:03:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:03:38.873321447 +0000 UTC m=+317.080770535" watchObservedRunningTime="2025-12-09 15:03:38.887831785 +0000 UTC m=+317.095280873" Dec 09 15:03:40 crc kubenswrapper[4583]: I1209 15:03:40.217489 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-7llk8" Dec 09 15:03:40 crc kubenswrapper[4583]: I1209 15:03:40.251082 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:03:59 crc kubenswrapper[4583]: I1209 15:03:59.391686 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:03:59 crc kubenswrapper[4583]: I1209 15:03:59.392101 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.276184 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" podUID="f6163f29-831f-435e-8410-9f08f394ca5f" containerName="registry" 
containerID="cri-o://d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4" gracePeriod=30 Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.629621 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.763822 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.763871 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.763914 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.763953 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764004 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764519 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764575 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764648 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764675 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69l7c\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.764702 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted\") pod \"f6163f29-831f-435e-8410-9f08f394ca5f\" (UID: \"f6163f29-831f-435e-8410-9f08f394ca5f\") " Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.765051 4583 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.765074 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f6163f29-831f-435e-8410-9f08f394ca5f-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.768119 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.768170 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.768409 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c" (OuterVolumeSpecName: "kube-api-access-69l7c") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "kube-api-access-69l7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.768565 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.770766 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.778076 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "f6163f29-831f-435e-8410-9f08f394ca5f" (UID: "f6163f29-831f-435e-8410-9f08f394ca5f"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.865798 4583 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.865821 4583 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f6163f29-831f-435e-8410-9f08f394ca5f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.865831 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69l7c\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-kube-api-access-69l7c\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.865838 4583 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f6163f29-831f-435e-8410-9f08f394ca5f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.865846 4583 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f6163f29-831f-435e-8410-9f08f394ca5f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.953786 4583 generic.go:334] "Generic (PLEG): container finished" podID="f6163f29-831f-435e-8410-9f08f394ca5f" containerID="d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4" exitCode=0 Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.953820 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" event={"ID":"f6163f29-831f-435e-8410-9f08f394ca5f","Type":"ContainerDied","Data":"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4"} Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.953831 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.953846 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-csjlq" event={"ID":"f6163f29-831f-435e-8410-9f08f394ca5f","Type":"ContainerDied","Data":"acb431f1812c120b9eaf5eb6566263c9396c5ab05301413662e297fb92b160e7"} Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.953863 4583 scope.go:117] "RemoveContainer" containerID="d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.967679 4583 scope.go:117] "RemoveContainer" containerID="d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4" Dec 09 15:04:05 crc kubenswrapper[4583]: E1209 15:04:05.968015 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4\": container with ID starting with d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4 not found: ID does not exist" containerID="d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.968042 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4"} err="failed to get container status \"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4\": rpc error: code = NotFound desc = could not find container \"d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4\": container with ID starting with d80e4cce7a63a74dd1c26aba8f8598f9d0057b388e5031cd458d7c7ca8ac98c4 not found: ID does not exist" Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.973647 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:04:05 crc kubenswrapper[4583]: I1209 15:04:05.975928 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-csjlq"] Dec 09 15:04:06 crc kubenswrapper[4583]: I1209 15:04:06.083136 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6163f29-831f-435e-8410-9f08f394ca5f" path="/var/lib/kubelet/pods/f6163f29-831f-435e-8410-9f08f394ca5f/volumes" Dec 09 15:04:29 crc kubenswrapper[4583]: I1209 15:04:29.392001 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:04:29 crc kubenswrapper[4583]: I1209 15:04:29.392536 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:04:59 crc kubenswrapper[4583]: I1209 15:04:59.391724 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 
09 15:04:59 crc kubenswrapper[4583]: I1209 15:04:59.392133 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:04:59 crc kubenswrapper[4583]: I1209 15:04:59.392170 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:04:59 crc kubenswrapper[4583]: I1209 15:04:59.392513 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:04:59 crc kubenswrapper[4583]: I1209 15:04:59.392562 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0" gracePeriod=600 Dec 09 15:05:00 crc kubenswrapper[4583]: I1209 15:05:00.158412 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0" exitCode=0 Dec 09 15:05:00 crc kubenswrapper[4583]: I1209 15:05:00.158484 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0"} Dec 09 15:05:00 crc kubenswrapper[4583]: I1209 15:05:00.158659 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85"} Dec 09 15:05:00 crc kubenswrapper[4583]: I1209 15:05:00.158679 4583 scope.go:117] "RemoveContainer" containerID="bcb4e44202380ba0fa2e8053a2b606f06d48608bdbb45a0e6d2b40c3e09d1815" Dec 09 15:06:59 crc kubenswrapper[4583]: I1209 15:06:59.391719 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:06:59 crc kubenswrapper[4583]: I1209 15:06:59.392158 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.967863 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dwsfd"] Dec 09 15:07:16 crc kubenswrapper[4583]: E1209 15:07:16.968358 4583 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f6163f29-831f-435e-8410-9f08f394ca5f" containerName="registry" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.968369 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6163f29-831f-435e-8410-9f08f394ca5f" containerName="registry" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.968445 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6163f29-831f-435e-8410-9f08f394ca5f" containerName="registry" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.968740 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.970214 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.971145 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.971147 4583 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-szkqc" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.975691 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dwsfd"] Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.979440 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6fm4x"] Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.980043 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6fm4x" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.981689 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-7j9s4"] Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.982209 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.983540 4583 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-tn759" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.983927 4583 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-rpx9f" Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.991620 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6fm4x"] Dec 09 15:07:16 crc kubenswrapper[4583]: I1209 15:07:16.997053 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-7j9s4"] Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.053533 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w68hb\" (UniqueName: \"kubernetes.io/projected/56190539-a710-4685-8aed-321aa76e1b89-kube-api-access-w68hb\") pod \"cert-manager-5b446d88c5-6fm4x\" (UID: \"56190539-a710-4685-8aed-321aa76e1b89\") " pod="cert-manager/cert-manager-5b446d88c5-6fm4x" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.053590 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4pwt\" (UniqueName: \"kubernetes.io/projected/cb6cb5b2-1b81-4961-a765-2da559fc1e69-kube-api-access-t4pwt\") pod \"cert-manager-webhook-5655c58dd6-7j9s4\" (UID: \"cb6cb5b2-1b81-4961-a765-2da559fc1e69\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.053642 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9w24\" (UniqueName: \"kubernetes.io/projected/f777d618-579e-4266-891d-694f44f53647-kube-api-access-x9w24\") pod \"cert-manager-cainjector-7f985d654d-dwsfd\" (UID: \"f777d618-579e-4266-891d-694f44f53647\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.154717 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w68hb\" (UniqueName: \"kubernetes.io/projected/56190539-a710-4685-8aed-321aa76e1b89-kube-api-access-w68hb\") pod \"cert-manager-5b446d88c5-6fm4x\" (UID: \"56190539-a710-4685-8aed-321aa76e1b89\") " pod="cert-manager/cert-manager-5b446d88c5-6fm4x" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.154787 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4pwt\" (UniqueName: \"kubernetes.io/projected/cb6cb5b2-1b81-4961-a765-2da559fc1e69-kube-api-access-t4pwt\") pod \"cert-manager-webhook-5655c58dd6-7j9s4\" (UID: \"cb6cb5b2-1b81-4961-a765-2da559fc1e69\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.154837 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9w24\" (UniqueName: \"kubernetes.io/projected/f777d618-579e-4266-891d-694f44f53647-kube-api-access-x9w24\") pod \"cert-manager-cainjector-7f985d654d-dwsfd\" (UID: \"f777d618-579e-4266-891d-694f44f53647\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.169596 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4pwt\" (UniqueName: 
\"kubernetes.io/projected/cb6cb5b2-1b81-4961-a765-2da559fc1e69-kube-api-access-t4pwt\") pod \"cert-manager-webhook-5655c58dd6-7j9s4\" (UID: \"cb6cb5b2-1b81-4961-a765-2da559fc1e69\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.169639 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w68hb\" (UniqueName: \"kubernetes.io/projected/56190539-a710-4685-8aed-321aa76e1b89-kube-api-access-w68hb\") pod \"cert-manager-5b446d88c5-6fm4x\" (UID: \"56190539-a710-4685-8aed-321aa76e1b89\") " pod="cert-manager/cert-manager-5b446d88c5-6fm4x" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.170097 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9w24\" (UniqueName: \"kubernetes.io/projected/f777d618-579e-4266-891d-694f44f53647-kube-api-access-x9w24\") pod \"cert-manager-cainjector-7f985d654d-dwsfd\" (UID: \"f777d618-579e-4266-891d-694f44f53647\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.280414 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.290232 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6fm4x" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.296059 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.416885 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dwsfd"] Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.429311 4583 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.641279 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" event={"ID":"f777d618-579e-4266-891d-694f44f53647","Type":"ContainerStarted","Data":"02c5c74c2f981ffad32b6ac73f2fb01a124a208088d6982fa709e4d28cb64469"} Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.663304 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-7j9s4"] Dec 09 15:07:17 crc kubenswrapper[4583]: W1209 15:07:17.667059 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb6cb5b2_1b81_4961_a765_2da559fc1e69.slice/crio-f88a13d60fdb2665700e827cbc792b02d49b458fbae33c0d5f95c8061146edd5 WatchSource:0}: Error finding container f88a13d60fdb2665700e827cbc792b02d49b458fbae33c0d5f95c8061146edd5: Status 404 returned error can't find the container with id f88a13d60fdb2665700e827cbc792b02d49b458fbae33c0d5f95c8061146edd5 Dec 09 15:07:17 crc kubenswrapper[4583]: I1209 15:07:17.668590 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6fm4x"] Dec 09 15:07:17 crc kubenswrapper[4583]: W1209 15:07:17.672527 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56190539_a710_4685_8aed_321aa76e1b89.slice/crio-78ac432eacc3373b6c9761412fba2cf76a86aa2ae27725e8bfa04ee93901d240 WatchSource:0}: Error finding container 
78ac432eacc3373b6c9761412fba2cf76a86aa2ae27725e8bfa04ee93901d240: Status 404 returned error can't find the container with id 78ac432eacc3373b6c9761412fba2cf76a86aa2ae27725e8bfa04ee93901d240 Dec 09 15:07:18 crc kubenswrapper[4583]: I1209 15:07:18.645178 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" event={"ID":"cb6cb5b2-1b81-4961-a765-2da559fc1e69","Type":"ContainerStarted","Data":"f88a13d60fdb2665700e827cbc792b02d49b458fbae33c0d5f95c8061146edd5"} Dec 09 15:07:18 crc kubenswrapper[4583]: I1209 15:07:18.646088 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6fm4x" event={"ID":"56190539-a710-4685-8aed-321aa76e1b89","Type":"ContainerStarted","Data":"78ac432eacc3373b6c9761412fba2cf76a86aa2ae27725e8bfa04ee93901d240"} Dec 09 15:07:19 crc kubenswrapper[4583]: I1209 15:07:19.652639 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" event={"ID":"f777d618-579e-4266-891d-694f44f53647","Type":"ContainerStarted","Data":"c7982b067849da085d0640501f54b6ed8988c47739d9930805f56ddf91a34706"} Dec 09 15:07:19 crc kubenswrapper[4583]: I1209 15:07:19.664366 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-dwsfd" podStartSLOduration=2.141916616 podStartE2EDuration="3.664352651s" podCreationTimestamp="2025-12-09 15:07:16 +0000 UTC" firstStartedPulling="2025-12-09 15:07:17.429057877 +0000 UTC m=+535.636506965" lastFinishedPulling="2025-12-09 15:07:18.951493912 +0000 UTC m=+537.158943000" observedRunningTime="2025-12-09 15:07:19.661965265 +0000 UTC m=+537.869414353" watchObservedRunningTime="2025-12-09 15:07:19.664352651 +0000 UTC m=+537.871801739" Dec 09 15:07:20 crc kubenswrapper[4583]: I1209 15:07:20.657830 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" event={"ID":"cb6cb5b2-1b81-4961-a765-2da559fc1e69","Type":"ContainerStarted","Data":"71e06358a237c8d5134cb2f08bd9d1af119cf8ad9d2f4748fa91c37a659dc516"} Dec 09 15:07:20 crc kubenswrapper[4583]: I1209 15:07:20.658085 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:20 crc kubenswrapper[4583]: I1209 15:07:20.659934 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6fm4x" event={"ID":"56190539-a710-4685-8aed-321aa76e1b89","Type":"ContainerStarted","Data":"f0a8ac7f57ed7dd0beee39786b5407282cd9964626c23355a60c848d9747fbcd"} Dec 09 15:07:20 crc kubenswrapper[4583]: I1209 15:07:20.669268 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" podStartSLOduration=2.274859686 podStartE2EDuration="4.669256084s" podCreationTimestamp="2025-12-09 15:07:16 +0000 UTC" firstStartedPulling="2025-12-09 15:07:17.668730053 +0000 UTC m=+535.876179141" lastFinishedPulling="2025-12-09 15:07:20.063126451 +0000 UTC m=+538.270575539" observedRunningTime="2025-12-09 15:07:20.667363288 +0000 UTC m=+538.874812376" watchObservedRunningTime="2025-12-09 15:07:20.669256084 +0000 UTC m=+538.876705171" Dec 09 15:07:20 crc kubenswrapper[4583]: I1209 15:07:20.678845 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-6fm4x" podStartSLOduration=2.2411067669999998 podStartE2EDuration="4.678837508s" 
podCreationTimestamp="2025-12-09 15:07:16 +0000 UTC" firstStartedPulling="2025-12-09 15:07:17.67413864 +0000 UTC m=+535.881587717" lastFinishedPulling="2025-12-09 15:07:20.11186937 +0000 UTC m=+538.319318458" observedRunningTime="2025-12-09 15:07:20.678581145 +0000 UTC m=+538.886030234" watchObservedRunningTime="2025-12-09 15:07:20.678837508 +0000 UTC m=+538.886286596" Dec 09 15:07:22 crc kubenswrapper[4583]: I1209 15:07:22.228427 4583 scope.go:117] "RemoveContainer" containerID="43d01cc338ba81167f0721b9efb0a947353e406521c2c01474e81839992ab4cd" Dec 09 15:07:22 crc kubenswrapper[4583]: I1209 15:07:22.240955 4583 scope.go:117] "RemoveContainer" containerID="c3ebc45d1291e85361c66fb94ab746ac156bd9e8fc6eb1f0d6cdf3daba95325c" Dec 09 15:07:22 crc kubenswrapper[4583]: I1209 15:07:22.255859 4583 scope.go:117] "RemoveContainer" containerID="aa11777b7d0ce523bca025ed7094887326d10c3736715c04a64ead10b53bae3e" Dec 09 15:07:27 crc kubenswrapper[4583]: I1209 15:07:27.298632 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-7j9s4" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.601621 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5rdc6"] Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602219 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-controller" containerID="cri-o://ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602299 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="nbdb" containerID="cri-o://a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602338 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="northd" containerID="cri-o://34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602372 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="sbdb" containerID="cri-o://f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602441 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-node" containerID="cri-o://0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602299 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.602518 4583 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-acl-logging" containerID="cri-o://8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.633237 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" containerID="cri-o://b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" gracePeriod=30 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.698031 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/2.log" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.698513 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/1.log" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.698550 4583 generic.go:334] "Generic (PLEG): container finished" podID="c240c9e3-573f-4237-b433-deb0f21e0123" containerID="dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9" exitCode=2 Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.698576 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerDied","Data":"dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9"} Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.698604 4583 scope.go:117] "RemoveContainer" containerID="ebe7bfc62594d54985740d925a79d8c59ab2338ae4c1d7693a5331fba1cbeb8a" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.699038 4583 scope.go:117] "RemoveContainer" containerID="dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.699179 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-j2sh5_openshift-multus(c240c9e3-573f-4237-b433-deb0f21e0123)\"" pod="openshift-multus/multus-j2sh5" podUID="c240c9e3-573f-4237-b433-deb0f21e0123" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.871374 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/3.log" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.873981 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovn-acl-logging/0.log" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.874555 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovn-controller/0.log" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.875108 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.915930 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-64sps"] Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916165 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="nbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916185 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="nbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916195 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916203 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916213 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916218 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916227 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="northd" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916233 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="northd" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916239 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916246 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916254 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916260 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916276 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-ovn-metrics" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916282 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-ovn-metrics" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916291 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kubecfg-setup" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916298 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kubecfg-setup" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916309 4583 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-acl-logging" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916316 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-acl-logging" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916325 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-node" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916331 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-node" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916339 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="sbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916345 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="sbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916437 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-ovn-metrics" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916448 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="sbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916456 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916471 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916478 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="nbdb" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916485 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916492 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916499 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="northd" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916505 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916510 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovn-acl-logging" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916519 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="kube-rbac-proxy-node" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916635 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916642 
4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916730 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: E1209 15:07:28.916843 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.916850 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerName="ovnkube-controller" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.918249 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.998620 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.998869 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.998959 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.998992 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999095 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999141 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999188 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999229 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999292 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999316 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999342 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999363 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999365 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999373 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999400 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999425 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999435 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54w5r\" (UniqueName: \"kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999484 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999504 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999528 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999559 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999586 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999626 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999630 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999656 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999684 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999731 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999655 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999673 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log" (OuterVolumeSpecName: "node-log") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999668 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999806 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch\") pod \"2acdc1e0-3af8-41bf-958f-3cd291da42db\" (UID: \"2acdc1e0-3af8-41bf-958f-3cd291da42db\") " Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999808 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash" (OuterVolumeSpecName: "host-slash") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999838 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:28 crc kubenswrapper[4583]: I1209 15:07:28.999774 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:28.999304 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket" (OuterVolumeSpecName: "log-socket") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000029 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000097 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000649 4583 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000673 4583 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000686 4583 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000699 4583 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-node-log\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000708 4583 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000718 4583 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000727 4583 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-slash\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000737 4583 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000766 4583 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000778 4583 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-log-socket\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000788 4583 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000796 4583 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000804 4583 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc 
kubenswrapper[4583]: I1209 15:07:29.000813 4583 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000823 4583 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000834 4583 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.000845 4583 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.005234 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.005498 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r" (OuterVolumeSpecName: "kube-api-access-54w5r") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "kube-api-access-54w5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.011283 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "2acdc1e0-3af8-41bf-958f-3cd291da42db" (UID: "2acdc1e0-3af8-41bf-958f-3cd291da42db"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101588 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-etc-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101633 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101659 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovn-node-metrics-cert\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101688 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101705 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-env-overrides\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101724 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-ovn\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101758 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-kubelet\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101775 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsw2t\" (UniqueName: \"kubernetes.io/projected/52746f26-898e-4d1e-98a6-e4ce6791a0eb-kube-api-access-qsw2t\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101815 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-slash\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101842 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-netd\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101868 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-var-lib-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101891 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-bin\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.101918 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102013 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-node-log\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102065 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-script-lib\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102084 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-systemd\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102122 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-log-socket\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102198 4583 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-config\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102225 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-systemd-units\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102245 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-netns\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102321 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54w5r\" (UniqueName: \"kubernetes.io/projected/2acdc1e0-3af8-41bf-958f-3cd291da42db-kube-api-access-54w5r\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102365 4583 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2acdc1e0-3af8-41bf-958f-3cd291da42db-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.102377 4583 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2acdc1e0-3af8-41bf-958f-3cd291da42db-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.202997 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-node-log\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203042 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-script-lib\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203072 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-systemd\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203098 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-log-socket\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203132 4583 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-config\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203150 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-systemd-units\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203165 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-netns\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203183 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-etc-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203202 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203225 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovn-node-metrics-cert\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203241 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203258 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-env-overrides\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203276 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-ovn\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203292 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-kubelet\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203305 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsw2t\" (UniqueName: \"kubernetes.io/projected/52746f26-898e-4d1e-98a6-e4ce6791a0eb-kube-api-access-qsw2t\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203322 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-slash\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203337 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-netd\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203338 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-node-log\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203353 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-var-lib-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203387 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-var-lib-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203404 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-bin\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203415 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-systemd-units\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203436 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-netns\") pod 
\"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203435 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203460 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203502 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-etc-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203506 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-bin\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203529 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-run-ovn-kubernetes\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203558 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-slash\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203585 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-kubelet\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203629 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-openvswitch\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203864 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-systemd\") pod \"ovnkube-node-64sps\" (UID: 
\"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203897 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-log-socket\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203899 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-host-cni-netd\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203922 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/52746f26-898e-4d1e-98a6-e4ce6791a0eb-run-ovn\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.203990 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-config\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.204004 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovnkube-script-lib\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.204082 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/52746f26-898e-4d1e-98a6-e4ce6791a0eb-env-overrides\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.206047 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/52746f26-898e-4d1e-98a6-e4ce6791a0eb-ovn-node-metrics-cert\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.215906 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsw2t\" (UniqueName: \"kubernetes.io/projected/52746f26-898e-4d1e-98a6-e4ce6791a0eb-kube-api-access-qsw2t\") pod \"ovnkube-node-64sps\" (UID: \"52746f26-898e-4d1e-98a6-e4ce6791a0eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.230278 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.391346 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.391410 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.706248 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/2.log" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.708874 4583 generic.go:334] "Generic (PLEG): container finished" podID="52746f26-898e-4d1e-98a6-e4ce6791a0eb" containerID="8dfc8d0bb6c68ce75790226d5bc5e6b3a7b6f0d7575109ee7ff7e65607efd1ff" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.708946 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerDied","Data":"8dfc8d0bb6c68ce75790226d5bc5e6b3a7b6f0d7575109ee7ff7e65607efd1ff"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.709020 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"c1f16b23c43b40a75849f15f785d0b651e85bf6fd0be76148c19caeb945da8ec"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.713071 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovnkube-controller/3.log" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.717959 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovn-acl-logging/0.log" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.718579 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5rdc6_2acdc1e0-3af8-41bf-958f-3cd291da42db/ovn-controller/0.log" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719209 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719242 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719279 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 15:07:29 crc kubenswrapper[4583]: 
I1209 15:07:29.719300 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719321 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719251 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719359 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719369 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719376 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719382 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" exitCode=0 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719388 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" exitCode=143 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719395 4583 generic.go:334] "Generic (PLEG): container finished" podID="2acdc1e0-3af8-41bf-958f-3cd291da42db" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" exitCode=143 Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719406 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719421 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719431 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719441 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719453 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719475 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719480 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719485 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719490 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719495 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719500 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719504 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719509 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719515 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719522 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719527 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719532 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719536 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719540 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719545 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719550 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719554 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719559 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719563 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719569 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719576 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719581 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719586 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719590 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719594 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719598 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719603 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719608 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719612 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719616 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719622 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5rdc6" event={"ID":"2acdc1e0-3af8-41bf-958f-3cd291da42db","Type":"ContainerDied","Data":"5da86cc75f95cab353e389b07674cc2d85c9f9aceba024d15907ce3d5ee7025f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719629 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719635 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719640 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719644 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719649 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719653 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719657 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719662 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719666 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.719670 4583 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.730945 4583 scope.go:117] "RemoveContainer" 
containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.749349 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5rdc6"] Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.752187 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5rdc6"] Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.754857 4583 scope.go:117] "RemoveContainer" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.768010 4583 scope.go:117] "RemoveContainer" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.791418 4583 scope.go:117] "RemoveContainer" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.817945 4583 scope.go:117] "RemoveContainer" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.828770 4583 scope.go:117] "RemoveContainer" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.839512 4583 scope.go:117] "RemoveContainer" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.852344 4583 scope.go:117] "RemoveContainer" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.868383 4583 scope.go:117] "RemoveContainer" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.889441 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.889985 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890016 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} err="failed to get container status \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890042 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.890356 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": container with ID starting 
with b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f not found: ID does not exist" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890379 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} err="failed to get container status \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": rpc error: code = NotFound desc = could not find container \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": container with ID starting with b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890396 4583 scope.go:117] "RemoveContainer" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.890835 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": container with ID starting with f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f not found: ID does not exist" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890856 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} err="failed to get container status \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": rpc error: code = NotFound desc = could not find container \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": container with ID starting with f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.890869 4583 scope.go:117] "RemoveContainer" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.891313 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": container with ID starting with a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852 not found: ID does not exist" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.891352 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} err="failed to get container status \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": rpc error: code = NotFound desc = could not find container \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": container with ID starting with a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.891389 4583 scope.go:117] "RemoveContainer" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.891693 4583 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": container with ID starting with 34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937 not found: ID does not exist" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.891720 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} err="failed to get container status \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": rpc error: code = NotFound desc = could not find container \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": container with ID starting with 34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.891736 4583 scope.go:117] "RemoveContainer" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.892025 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": container with ID starting with fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba not found: ID does not exist" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892048 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} err="failed to get container status \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": rpc error: code = NotFound desc = could not find container \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": container with ID starting with fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892061 4583 scope.go:117] "RemoveContainer" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.892327 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": container with ID starting with 0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2 not found: ID does not exist" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892362 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} err="failed to get container status \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": rpc error: code = NotFound desc = could not find container \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": container with ID starting with 0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892384 4583 scope.go:117] "RemoveContainer" 
containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.892656 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": container with ID starting with 8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d not found: ID does not exist" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892678 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} err="failed to get container status \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": rpc error: code = NotFound desc = could not find container \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": container with ID starting with 8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892692 4583 scope.go:117] "RemoveContainer" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.892914 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": container with ID starting with ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd not found: ID does not exist" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892941 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} err="failed to get container status \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": rpc error: code = NotFound desc = could not find container \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": container with ID starting with ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.892959 4583 scope.go:117] "RemoveContainer" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: E1209 15:07:29.893175 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": container with ID starting with 80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee not found: ID does not exist" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893198 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} err="failed to get container status \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": rpc error: code = NotFound desc = could not find container \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": container with ID starting with 
80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893211 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893678 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} err="failed to get container status \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893706 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893957 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} err="failed to get container status \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": rpc error: code = NotFound desc = could not find container \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": container with ID starting with b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.893975 4583 scope.go:117] "RemoveContainer" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894279 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} err="failed to get container status \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": rpc error: code = NotFound desc = could not find container \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": container with ID starting with f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894306 4583 scope.go:117] "RemoveContainer" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894622 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} err="failed to get container status \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": rpc error: code = NotFound desc = could not find container \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": container with ID starting with a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894642 4583 scope.go:117] "RemoveContainer" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894942 4583 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} err="failed to get container status \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": rpc error: code = NotFound desc = could not find container \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": container with ID starting with 34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.894960 4583 scope.go:117] "RemoveContainer" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895199 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} err="failed to get container status \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": rpc error: code = NotFound desc = could not find container \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": container with ID starting with fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895225 4583 scope.go:117] "RemoveContainer" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895477 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} err="failed to get container status \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": rpc error: code = NotFound desc = could not find container \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": container with ID starting with 0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895497 4583 scope.go:117] "RemoveContainer" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895725 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} err="failed to get container status \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": rpc error: code = NotFound desc = could not find container \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": container with ID starting with 8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895762 4583 scope.go:117] "RemoveContainer" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.895992 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} err="failed to get container status \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": rpc error: code = NotFound desc = could not find container \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": container with ID starting with ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd not found: ID does not exist" Dec 
09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896010 4583 scope.go:117] "RemoveContainer" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896251 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} err="failed to get container status \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": rpc error: code = NotFound desc = could not find container \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": container with ID starting with 80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896277 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896499 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} err="failed to get container status \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896521 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896734 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} err="failed to get container status \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": rpc error: code = NotFound desc = could not find container \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": container with ID starting with b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.896790 4583 scope.go:117] "RemoveContainer" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897010 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} err="failed to get container status \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": rpc error: code = NotFound desc = could not find container \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": container with ID starting with f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897028 4583 scope.go:117] "RemoveContainer" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897290 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} err="failed to get container status 
\"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": rpc error: code = NotFound desc = could not find container \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": container with ID starting with a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897312 4583 scope.go:117] "RemoveContainer" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897522 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} err="failed to get container status \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": rpc error: code = NotFound desc = could not find container \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": container with ID starting with 34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897542 4583 scope.go:117] "RemoveContainer" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897831 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} err="failed to get container status \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": rpc error: code = NotFound desc = could not find container \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": container with ID starting with fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.897853 4583 scope.go:117] "RemoveContainer" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898102 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} err="failed to get container status \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": rpc error: code = NotFound desc = could not find container \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": container with ID starting with 0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898119 4583 scope.go:117] "RemoveContainer" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898323 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} err="failed to get container status \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": rpc error: code = NotFound desc = could not find container \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": container with ID starting with 8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898342 4583 scope.go:117] "RemoveContainer" 
containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898633 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} err="failed to get container status \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": rpc error: code = NotFound desc = could not find container \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": container with ID starting with ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898668 4583 scope.go:117] "RemoveContainer" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898907 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} err="failed to get container status \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": rpc error: code = NotFound desc = could not find container \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": container with ID starting with 80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.898927 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899139 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} err="failed to get container status \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899158 4583 scope.go:117] "RemoveContainer" containerID="b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899360 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f"} err="failed to get container status \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": rpc error: code = NotFound desc = could not find container \"b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f\": container with ID starting with b17ca7f01be55ded7abd3ee83acc064a18643b1b5095b68cb665207134108e0f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899378 4583 scope.go:117] "RemoveContainer" containerID="f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899590 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f"} err="failed to get container status \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": rpc error: code = NotFound desc = could not find 
container \"f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f\": container with ID starting with f4c7d2404bb04aa2d4a9129903ab448f7e46a56b32fc121b5f549e351c4a923f not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899609 4583 scope.go:117] "RemoveContainer" containerID="a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899815 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852"} err="failed to get container status \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": rpc error: code = NotFound desc = could not find container \"a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852\": container with ID starting with a79385047fb71026ecf1a87d3b4642a3fa5bc7e2f9189ecce6607c2b1c384852 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.899833 4583 scope.go:117] "RemoveContainer" containerID="34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900039 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937"} err="failed to get container status \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": rpc error: code = NotFound desc = could not find container \"34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937\": container with ID starting with 34d7becf3efa5e1b967649b175f076190746ada1256f96cac8a51c9b43f20937 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900056 4583 scope.go:117] "RemoveContainer" containerID="fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900258 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba"} err="failed to get container status \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": rpc error: code = NotFound desc = could not find container \"fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba\": container with ID starting with fbba8ae084884154207acb7194156d8f92cd92acc851a02e169d579a022a62ba not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900275 4583 scope.go:117] "RemoveContainer" containerID="0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900476 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2"} err="failed to get container status \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": rpc error: code = NotFound desc = could not find container \"0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2\": container with ID starting with 0919fd0c5984212902e56b8fd7565949cf8ad34de381aa4527fcaed4a0b826c2 not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900496 4583 scope.go:117] "RemoveContainer" containerID="8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900757 4583 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d"} err="failed to get container status \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": rpc error: code = NotFound desc = could not find container \"8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d\": container with ID starting with 8690c3e14c79f0a91484b3e523e67a0128550fc26fec98438b428fe268f9da1d not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900776 4583 scope.go:117] "RemoveContainer" containerID="ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900977 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd"} err="failed to get container status \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": rpc error: code = NotFound desc = could not find container \"ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd\": container with ID starting with ed04f0302be55993f34feb1b7b45a64160e3aaf197aad512655b900f620c28bd not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.900994 4583 scope.go:117] "RemoveContainer" containerID="80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.901176 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee"} err="failed to get container status \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": rpc error: code = NotFound desc = could not find container \"80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee\": container with ID starting with 80478d71340f6bc85a65af5252cc1d22ebdf77f99134004142eb69c42596c8ee not found: ID does not exist" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.901192 4583 scope.go:117] "RemoveContainer" containerID="b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47" Dec 09 15:07:29 crc kubenswrapper[4583]: I1209 15:07:29.901373 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47"} err="failed to get container status \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": rpc error: code = NotFound desc = could not find container \"b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47\": container with ID starting with b3142f5a6973960752b6dcf2ae4817d06c8316bd66815da2132efc1b7d823a47 not found: ID does not exist" Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.084334 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2acdc1e0-3af8-41bf-958f-3cd291da42db" path="/var/lib/kubelet/pods/2acdc1e0-3af8-41bf-958f-3cd291da42db/volumes" Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730293 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"c3661ee7f8515900f32d827159e5c1eaad1239249ba0184d0bbf6153b7de1f9f"} Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730340 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"df8f2c36a6f6a868f96bc681d341cdf0bfe581976f34c5f815be87b57dba549d"} Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730353 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"80befb37b4e7ba140a8f5ad10f48656ffb50d2dfbb0409ebcaa7c5add407de73"} Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730363 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"ac27da2cb62c66d086c68673db72e55d6efbbc05a6c3491104c32c9f88818b6a"} Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730372 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"5817975eccc5c7d607c524a7b348e736adb9403e26d81f53ba055d18bf035c13"} Dec 09 15:07:30 crc kubenswrapper[4583]: I1209 15:07:30.730381 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"8a43b543cab4ee213ae8d9d11c751004b1e1ddc587a45b32437dd4191c57f52e"} Dec 09 15:07:32 crc kubenswrapper[4583]: I1209 15:07:32.745809 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"2896899a5f68656db3442e534e4ad2cf2693be3c4748122a49b994438ea00559"} Dec 09 15:07:34 crc kubenswrapper[4583]: I1209 15:07:34.758079 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" event={"ID":"52746f26-898e-4d1e-98a6-e4ce6791a0eb","Type":"ContainerStarted","Data":"6a6333a2458ce11e09c8b7af4e9ac13cb51bb36830d4ae21976b79fec5c2fde2"} Dec 09 15:07:34 crc kubenswrapper[4583]: I1209 15:07:34.758389 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:34 crc kubenswrapper[4583]: I1209 15:07:34.780217 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:34 crc kubenswrapper[4583]: I1209 15:07:34.785598 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" podStartSLOduration=6.785583123 podStartE2EDuration="6.785583123s" podCreationTimestamp="2025-12-09 15:07:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:07:34.78128575 +0000 UTC m=+552.988734838" watchObservedRunningTime="2025-12-09 15:07:34.785583123 +0000 UTC m=+552.993032211" Dec 09 15:07:35 crc kubenswrapper[4583]: I1209 15:07:35.762675 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:35 crc kubenswrapper[4583]: I1209 15:07:35.762981 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:35 crc kubenswrapper[4583]: I1209 15:07:35.787060 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:39 crc kubenswrapper[4583]: I1209 15:07:39.078892 4583 scope.go:117] "RemoveContainer" containerID="dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9" Dec 09 15:07:39 crc kubenswrapper[4583]: E1209 15:07:39.079676 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-j2sh5_openshift-multus(c240c9e3-573f-4237-b433-deb0f21e0123)\"" pod="openshift-multus/multus-j2sh5" podUID="c240c9e3-573f-4237-b433-deb0f21e0123" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.208289 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b"] Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.209635 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.211684 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.220729 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b"] Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.397884 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx"] Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.398289 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.398406 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.398563 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fqhw\" (UniqueName: \"kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.399099 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.407350 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx"] Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499566 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fqhw\" (UniqueName: \"kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499615 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499650 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kksn7\" (UniqueName: \"kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499672 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499702 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.499725 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.500146 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " 
pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.500214 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.518241 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fqhw\" (UniqueName: \"kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.521105 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.539990 4583 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(11cd6c66e56bfdef1d6b5b0585159d8f3cba5464285cdedccbaaa46ea375d865): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.540064 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(11cd6c66e56bfdef1d6b5b0585159d8f3cba5464285cdedccbaaa46ea375d865): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.540090 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(11cd6c66e56bfdef1d6b5b0585159d8f3cba5464285cdedccbaaa46ea375d865): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.540145 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace(56bb49ee-b985-4d14-9124-c9143393b8fb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace(56bb49ee-b985-4d14-9124-c9143393b8fb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(11cd6c66e56bfdef1d6b5b0585159d8f3cba5464285cdedccbaaa46ea375d865): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.601389 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kksn7\" (UniqueName: \"kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.601801 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.601852 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.602379 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.602497 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.617854 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kksn7\" (UniqueName: 
\"kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.709508 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.734011 4583 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(e2eccea192fa4b9f3b271a1d8f204e7b9fba5dae5ccc488168d4a3d463ff11d8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.734098 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(e2eccea192fa4b9f3b271a1d8f204e7b9fba5dae5ccc488168d4a3d463ff11d8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.734130 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(e2eccea192fa4b9f3b271a1d8f204e7b9fba5dae5ccc488168d4a3d463ff11d8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.734206 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace(adf5e854-c962-4889-8737-6d5197f9653a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace(adf5e854-c962-4889-8737-6d5197f9653a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(e2eccea192fa4b9f3b271a1d8f204e7b9fba5dae5ccc488168d4a3d463ff11d8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" podUID="adf5e854-c962-4889-8737-6d5197f9653a" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.821249 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.821259 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.821657 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: I1209 15:07:46.821741 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.843640 4583 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(52b21309c694d0ad3bd08339b3546bc37de8680d73c5afb98fbff3f8901ba26c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.843823 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(52b21309c694d0ad3bd08339b3546bc37de8680d73c5afb98fbff3f8901ba26c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.843853 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(52b21309c694d0ad3bd08339b3546bc37de8680d73c5afb98fbff3f8901ba26c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.843930 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace(adf5e854-c962-4889-8737-6d5197f9653a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace(adf5e854-c962-4889-8737-6d5197f9653a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_openshift-marketplace_adf5e854-c962-4889-8737-6d5197f9653a_0(52b21309c694d0ad3bd08339b3546bc37de8680d73c5afb98fbff3f8901ba26c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" podUID="adf5e854-c962-4889-8737-6d5197f9653a" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.849725 4583 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(024780fd128d00f2ffc9396a8d68f73626dad7e01ea43f77fa96f65d64a31b75): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.849801 4583 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(024780fd128d00f2ffc9396a8d68f73626dad7e01ea43f77fa96f65d64a31b75): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.849830 4583 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(024780fd128d00f2ffc9396a8d68f73626dad7e01ea43f77fa96f65d64a31b75): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:07:46 crc kubenswrapper[4583]: E1209 15:07:46.849887 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace(56bb49ee-b985-4d14-9124-c9143393b8fb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace(56bb49ee-b985-4d14-9124-c9143393b8fb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_openshift-marketplace_56bb49ee-b985-4d14-9124-c9143393b8fb_0(024780fd128d00f2ffc9396a8d68f73626dad7e01ea43f77fa96f65d64a31b75): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" Dec 09 15:07:52 crc kubenswrapper[4583]: I1209 15:07:52.080534 4583 scope.go:117] "RemoveContainer" containerID="dfd7f9f7c2999475afdbb72006e6582db9d7b1ea6a414d3091a6f918b7c37df9" Dec 09 15:07:52 crc kubenswrapper[4583]: I1209 15:07:52.850706 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2sh5_c240c9e3-573f-4237-b433-deb0f21e0123/kube-multus/2.log" Dec 09 15:07:52 crc kubenswrapper[4583]: I1209 15:07:52.851029 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2sh5" event={"ID":"c240c9e3-573f-4237-b433-deb0f21e0123","Type":"ContainerStarted","Data":"82de9aabff07e4dc48e9e8a48e6ad45fc31d5b4b14eb11e50eaf9700887c0966"} Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.078131 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.078677 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.209153 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx"] Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.877385 4583 generic.go:334] "Generic (PLEG): container finished" podID="adf5e854-c962-4889-8737-6d5197f9653a" containerID="92c2acb2d0555d2a59929584cc7062ed546a30b1df48e043234093d30318fc2a" exitCode=0 Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.877509 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" event={"ID":"adf5e854-c962-4889-8737-6d5197f9653a","Type":"ContainerDied","Data":"92c2acb2d0555d2a59929584cc7062ed546a30b1df48e043234093d30318fc2a"} Dec 09 15:07:58 crc kubenswrapper[4583]: I1209 15:07:58.878076 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" event={"ID":"adf5e854-c962-4889-8737-6d5197f9653a","Type":"ContainerStarted","Data":"15d037087ef0f05c6ac5a0284b1fc187bba7b62eb8f0a9c636240343d06d0768"} Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.246898 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64sps" Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.391710 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.391807 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.391861 4583 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.392396 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.392473 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85" gracePeriod=600 Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.886927 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85" exitCode=0 Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.887010 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85"} Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.887208 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a"} Dec 09 15:07:59 crc kubenswrapper[4583]: I1209 15:07:59.887229 4583 scope.go:117] "RemoveContainer" containerID="a3cd7ba3758e939105e3e602682419ab4dde31e2b1948da8cf196038e53ae5c0" Dec 09 15:08:00 crc kubenswrapper[4583]: I1209 15:08:00.894244 4583 generic.go:334] "Generic (PLEG): container finished" podID="adf5e854-c962-4889-8737-6d5197f9653a" containerID="64dfa7555514afc418b755c1b3099b649c2229ffddcbb415dbb7bf9b0fc28754" exitCode=0 Dec 09 15:08:00 crc kubenswrapper[4583]: I1209 15:08:00.894283 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" event={"ID":"adf5e854-c962-4889-8737-6d5197f9653a","Type":"ContainerDied","Data":"64dfa7555514afc418b755c1b3099b649c2229ffddcbb415dbb7bf9b0fc28754"} Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.078498 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.079141 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.229932 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b"] Dec 09 15:08:01 crc kubenswrapper[4583]: W1209 15:08:01.233720 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56bb49ee_b985_4d14_9124_c9143393b8fb.slice/crio-217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d WatchSource:0}: Error finding container 217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d: Status 404 returned error can't find the container with id 217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.904410 4583 generic.go:334] "Generic (PLEG): container finished" podID="adf5e854-c962-4889-8737-6d5197f9653a" containerID="6aaa468470f0473ac4da3734a83ae49eca420a1df95add1c48cb9ba9dcf942a0" exitCode=0 Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.904514 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" event={"ID":"adf5e854-c962-4889-8737-6d5197f9653a","Type":"ContainerDied","Data":"6aaa468470f0473ac4da3734a83ae49eca420a1df95add1c48cb9ba9dcf942a0"} Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.907315 4583 generic.go:334] "Generic (PLEG): container finished" podID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerID="12b1fd1d1d542e7913003c3864e2051cee4090f09c5acd3d96a0184697776315" exitCode=0 Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.907362 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" event={"ID":"56bb49ee-b985-4d14-9124-c9143393b8fb","Type":"ContainerDied","Data":"12b1fd1d1d542e7913003c3864e2051cee4090f09c5acd3d96a0184697776315"} Dec 09 15:08:01 crc kubenswrapper[4583]: I1209 15:08:01.907390 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" event={"ID":"56bb49ee-b985-4d14-9124-c9143393b8fb","Type":"ContainerStarted","Data":"217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d"} Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.089973 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.283881 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kksn7\" (UniqueName: \"kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7\") pod \"adf5e854-c962-4889-8737-6d5197f9653a\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.283947 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle\") pod \"adf5e854-c962-4889-8737-6d5197f9653a\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.284032 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util\") pod \"adf5e854-c962-4889-8737-6d5197f9653a\" (UID: \"adf5e854-c962-4889-8737-6d5197f9653a\") " Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.285897 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle" (OuterVolumeSpecName: "bundle") pod "adf5e854-c962-4889-8737-6d5197f9653a" (UID: "adf5e854-c962-4889-8737-6d5197f9653a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.289239 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7" (OuterVolumeSpecName: "kube-api-access-kksn7") pod "adf5e854-c962-4889-8737-6d5197f9653a" (UID: "adf5e854-c962-4889-8737-6d5197f9653a"). InnerVolumeSpecName "kube-api-access-kksn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.293648 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util" (OuterVolumeSpecName: "util") pod "adf5e854-c962-4889-8737-6d5197f9653a" (UID: "adf5e854-c962-4889-8737-6d5197f9653a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.386107 4583 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-util\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.386137 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kksn7\" (UniqueName: \"kubernetes.io/projected/adf5e854-c962-4889-8737-6d5197f9653a-kube-api-access-kksn7\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.386168 4583 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adf5e854-c962-4889-8737-6d5197f9653a-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.918546 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" event={"ID":"adf5e854-c962-4889-8737-6d5197f9653a","Type":"ContainerDied","Data":"15d037087ef0f05c6ac5a0284b1fc187bba7b62eb8f0a9c636240343d06d0768"} Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.918893 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15d037087ef0f05c6ac5a0284b1fc187bba7b62eb8f0a9c636240343d06d0768" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.918586 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx" Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.920047 4583 generic.go:334] "Generic (PLEG): container finished" podID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerID="5b297b7e343f24bb523d9932bb763249de7884be3995ad7b1d58b3a21b8a73e6" exitCode=0 Dec 09 15:08:03 crc kubenswrapper[4583]: I1209 15:08:03.920068 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" event={"ID":"56bb49ee-b985-4d14-9124-c9143393b8fb","Type":"ContainerDied","Data":"5b297b7e343f24bb523d9932bb763249de7884be3995ad7b1d58b3a21b8a73e6"} Dec 09 15:08:04 crc kubenswrapper[4583]: I1209 15:08:04.928567 4583 generic.go:334] "Generic (PLEG): container finished" podID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerID="d47d5071431d22c1b840fd9125fe1240711063fb5c3552db612ef6e416a14462" exitCode=0 Dec 09 15:08:04 crc kubenswrapper[4583]: I1209 15:08:04.928682 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" event={"ID":"56bb49ee-b985-4d14-9124-c9143393b8fb","Type":"ContainerDied","Data":"d47d5071431d22c1b840fd9125fe1240711063fb5c3552db612ef6e416a14462"} Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.102773 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.215921 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fqhw\" (UniqueName: \"kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw\") pod \"56bb49ee-b985-4d14-9124-c9143393b8fb\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.216205 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle\") pod \"56bb49ee-b985-4d14-9124-c9143393b8fb\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.216227 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util\") pod \"56bb49ee-b985-4d14-9124-c9143393b8fb\" (UID: \"56bb49ee-b985-4d14-9124-c9143393b8fb\") " Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.217144 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle" (OuterVolumeSpecName: "bundle") pod "56bb49ee-b985-4d14-9124-c9143393b8fb" (UID: "56bb49ee-b985-4d14-9124-c9143393b8fb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.220936 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw" (OuterVolumeSpecName: "kube-api-access-6fqhw") pod "56bb49ee-b985-4d14-9124-c9143393b8fb" (UID: "56bb49ee-b985-4d14-9124-c9143393b8fb"). InnerVolumeSpecName "kube-api-access-6fqhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.226283 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util" (OuterVolumeSpecName: "util") pod "56bb49ee-b985-4d14-9124-c9143393b8fb" (UID: "56bb49ee-b985-4d14-9124-c9143393b8fb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.317247 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fqhw\" (UniqueName: \"kubernetes.io/projected/56bb49ee-b985-4d14-9124-c9143393b8fb-kube-api-access-6fqhw\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.317278 4583 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.317288 4583 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56bb49ee-b985-4d14-9124-c9143393b8fb-util\") on node \"crc\" DevicePath \"\"" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.939628 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" event={"ID":"56bb49ee-b985-4d14-9124-c9143393b8fb","Type":"ContainerDied","Data":"217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d"} Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.939672 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="217eac3541237af9efdb3af4b954a2fd746856253dcb1e1602e9c59c844a508d" Dec 09 15:08:06 crc kubenswrapper[4583]: I1209 15:08:06.939695 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867014 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc"] Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867533 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867544 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867553 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="pull" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867558 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="pull" Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867569 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867575 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867584 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="pull" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867590 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="pull" Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867596 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" 
containerName="util" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867601 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="util" Dec 09 15:08:10 crc kubenswrapper[4583]: E1209 15:08:10.867614 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="util" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867619 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="util" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867703 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="adf5e854-c962-4889-8737-6d5197f9653a" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.867719 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="56bb49ee-b985-4d14-9124-c9143393b8fb" containerName="extract" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.868190 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871030 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871082 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-tmnrf" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871141 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871246 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871262 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.871325 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.873819 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l65sm\" (UniqueName: \"kubernetes.io/projected/b5bc8251-67d0-47c6-837a-b10f8214e1ca-kube-api-access-l65sm\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.873872 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b5bc8251-67d0-47c6-837a-b10f8214e1ca-manager-config\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.873893 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-webhook-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.873922 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-apiservice-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.873947 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.886055 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc"] Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.975217 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-apiservice-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.975286 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.975359 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l65sm\" (UniqueName: \"kubernetes.io/projected/b5bc8251-67d0-47c6-837a-b10f8214e1ca-kube-api-access-l65sm\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.975390 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b5bc8251-67d0-47c6-837a-b10f8214e1ca-manager-config\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.975405 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-webhook-cert\") pod 
\"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.976152 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b5bc8251-67d0-47c6-837a-b10f8214e1ca-manager-config\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.984299 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-webhook-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.984953 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:10 crc kubenswrapper[4583]: I1209 15:08:10.989214 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b5bc8251-67d0-47c6-837a-b10f8214e1ca-apiservice-cert\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:11 crc kubenswrapper[4583]: I1209 15:08:11.000658 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l65sm\" (UniqueName: \"kubernetes.io/projected/b5bc8251-67d0-47c6-837a-b10f8214e1ca-kube-api-access-l65sm\") pod \"loki-operator-controller-manager-7fb468c877-9bdpc\" (UID: \"b5bc8251-67d0-47c6-837a-b10f8214e1ca\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:11 crc kubenswrapper[4583]: I1209 15:08:11.180128 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:11 crc kubenswrapper[4583]: I1209 15:08:11.549349 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc"] Dec 09 15:08:11 crc kubenswrapper[4583]: W1209 15:08:11.553901 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5bc8251_67d0_47c6_837a_b10f8214e1ca.slice/crio-8bc7a80dae2d1593dbd27143cafb8b106831c27a122e58bcbae6547794855ab1 WatchSource:0}: Error finding container 8bc7a80dae2d1593dbd27143cafb8b106831c27a122e58bcbae6547794855ab1: Status 404 returned error can't find the container with id 8bc7a80dae2d1593dbd27143cafb8b106831c27a122e58bcbae6547794855ab1 Dec 09 15:08:11 crc kubenswrapper[4583]: I1209 15:08:11.963005 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" event={"ID":"b5bc8251-67d0-47c6-837a-b10f8214e1ca","Type":"ContainerStarted","Data":"8bc7a80dae2d1593dbd27143cafb8b106831c27a122e58bcbae6547794855ab1"} Dec 09 15:08:14 crc kubenswrapper[4583]: I1209 15:08:14.984950 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" event={"ID":"b5bc8251-67d0-47c6-837a-b10f8214e1ca","Type":"ContainerStarted","Data":"50499e74c6769b02c7e046f615d7d3fc06b3964ce912479d540d81769e18bb3d"} Dec 09 15:08:20 crc kubenswrapper[4583]: I1209 15:08:20.017499 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" event={"ID":"b5bc8251-67d0-47c6-837a-b10f8214e1ca","Type":"ContainerStarted","Data":"d8d27e640eeee9028ed01b1ae37a0b579c4510f1e8b34689a27ff67b8768f111"} Dec 09 15:08:20 crc kubenswrapper[4583]: I1209 15:08:20.017870 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:20 crc kubenswrapper[4583]: I1209 15:08:20.019440 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" Dec 09 15:08:20 crc kubenswrapper[4583]: I1209 15:08:20.033369 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-7fb468c877-9bdpc" podStartSLOduration=2.1393739529999998 podStartE2EDuration="10.033352618s" podCreationTimestamp="2025-12-09 15:08:10 +0000 UTC" firstStartedPulling="2025-12-09 15:08:11.555728687 +0000 UTC m=+589.763177764" lastFinishedPulling="2025-12-09 15:08:19.449707341 +0000 UTC m=+597.657156429" observedRunningTime="2025-12-09 15:08:20.03067784 +0000 UTC m=+598.238126928" watchObservedRunningTime="2025-12-09 15:08:20.033352618 +0000 UTC m=+598.240801706" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.642635 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-xz6wb"] Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.643234 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.645348 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.647634 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.647671 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-h82g7" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.660457 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-xz6wb"] Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.691213 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjfxn\" (UniqueName: \"kubernetes.io/projected/bc4e3345-4a60-4071-a644-d51029c38bd2-kube-api-access-zjfxn\") pod \"cluster-logging-operator-ff9846bd-xz6wb\" (UID: \"bc4e3345-4a60-4071-a644-d51029c38bd2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.792267 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjfxn\" (UniqueName: \"kubernetes.io/projected/bc4e3345-4a60-4071-a644-d51029c38bd2-kube-api-access-zjfxn\") pod \"cluster-logging-operator-ff9846bd-xz6wb\" (UID: \"bc4e3345-4a60-4071-a644-d51029c38bd2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.807045 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjfxn\" (UniqueName: \"kubernetes.io/projected/bc4e3345-4a60-4071-a644-d51029c38bd2-kube-api-access-zjfxn\") pod \"cluster-logging-operator-ff9846bd-xz6wb\" (UID: \"bc4e3345-4a60-4071-a644-d51029c38bd2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" Dec 09 15:08:21 crc kubenswrapper[4583]: I1209 15:08:21.955289 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" Dec 09 15:08:22 crc kubenswrapper[4583]: I1209 15:08:22.138488 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-xz6wb"] Dec 09 15:08:23 crc kubenswrapper[4583]: I1209 15:08:23.041260 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" event={"ID":"bc4e3345-4a60-4071-a644-d51029c38bd2","Type":"ContainerStarted","Data":"a5b24c00f2985b70165b46b0f117c49190955ee958a3878cd4acfb6c1e785c75"} Dec 09 15:08:26 crc kubenswrapper[4583]: I1209 15:08:26.055688 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" event={"ID":"bc4e3345-4a60-4071-a644-d51029c38bd2","Type":"ContainerStarted","Data":"09936541597f95cf710e665acd6df623b10381d4c52dbb745798549e26681448"} Dec 09 15:08:26 crc kubenswrapper[4583]: I1209 15:08:26.071287 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-ff9846bd-xz6wb" podStartSLOduration=1.430339855 podStartE2EDuration="5.071272402s" podCreationTimestamp="2025-12-09 15:08:21 +0000 UTC" firstStartedPulling="2025-12-09 15:08:22.16142772 +0000 UTC m=+600.368876808" lastFinishedPulling="2025-12-09 15:08:25.802360267 +0000 UTC m=+604.009809355" observedRunningTime="2025-12-09 15:08:26.067064751 +0000 UTC m=+604.274513840" watchObservedRunningTime="2025-12-09 15:08:26.071272402 +0000 UTC m=+604.278721480" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.713867 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.714672 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.716538 4583 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-5ddf6" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.716558 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.716945 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.720374 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.880329 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp525\" (UniqueName: \"kubernetes.io/projected/0a7b4110-591e-4f38-8786-9e2375c77360-kube-api-access-sp525\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.880473 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a55945de-8ab2-4463-a5ac-b3637661a435\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a55945de-8ab2-4463-a5ac-b3637661a435\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.981510 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a55945de-8ab2-4463-a5ac-b3637661a435\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a55945de-8ab2-4463-a5ac-b3637661a435\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.981575 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp525\" (UniqueName: \"kubernetes.io/projected/0a7b4110-591e-4f38-8786-9e2375c77360-kube-api-access-sp525\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.984049 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.984079 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a55945de-8ab2-4463-a5ac-b3637661a435\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a55945de-8ab2-4463-a5ac-b3637661a435\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/47fcc731572460c2720c7b9ba671bac0d48b790280b6a9c4e563fc2931a9807c/globalmount\"" pod="minio-dev/minio" Dec 09 15:08:29 crc kubenswrapper[4583]: I1209 15:08:29.999216 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp525\" (UniqueName: \"kubernetes.io/projected/0a7b4110-591e-4f38-8786-9e2375c77360-kube-api-access-sp525\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:30 crc kubenswrapper[4583]: I1209 15:08:30.002244 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a55945de-8ab2-4463-a5ac-b3637661a435\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a55945de-8ab2-4463-a5ac-b3637661a435\") pod \"minio\" (UID: \"0a7b4110-591e-4f38-8786-9e2375c77360\") " pod="minio-dev/minio" Dec 09 15:08:30 crc kubenswrapper[4583]: I1209 15:08:30.026309 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 09 15:08:30 crc kubenswrapper[4583]: I1209 15:08:30.414974 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 09 15:08:31 crc kubenswrapper[4583]: I1209 15:08:31.079719 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"0a7b4110-591e-4f38-8786-9e2375c77360","Type":"ContainerStarted","Data":"e64e76d6fd6a066dc8cc4a6ff1a3362d3de23bec720c45847d9b6228cf0c80d4"} Dec 09 15:08:34 crc kubenswrapper[4583]: I1209 15:08:34.100611 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"0a7b4110-591e-4f38-8786-9e2375c77360","Type":"ContainerStarted","Data":"6f39b835dee0f2f5974ea04f47a85a0806bc81b5fa4ee7d228ecfa5491da52ad"} Dec 09 15:08:34 crc kubenswrapper[4583]: I1209 15:08:34.113896 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=3.922944873 podStartE2EDuration="7.113882355s" podCreationTimestamp="2025-12-09 15:08:27 +0000 UTC" firstStartedPulling="2025-12-09 15:08:30.420472126 +0000 UTC m=+608.627921214" lastFinishedPulling="2025-12-09 15:08:33.611409608 +0000 UTC m=+611.818858696" observedRunningTime="2025-12-09 15:08:34.110765555 +0000 UTC m=+612.318214644" watchObservedRunningTime="2025-12-09 15:08:34.113882355 +0000 UTC m=+612.321331433" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.057701 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.058694 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.060720 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.060874 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-r78h9" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.061008 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.061193 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.061341 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.071608 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.173737 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.173792 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.173833 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vkh2\" (UniqueName: \"kubernetes.io/projected/092b4cea-9783-446e-885d-894810464116-kube-api-access-8vkh2\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.173864 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.173985 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-config\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.202357 4583 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-pvl24"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.203039 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.205920 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.206010 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.205932 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.213800 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-pvl24"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.254430 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.255240 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.257728 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.257905 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.262966 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.275240 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.275283 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.275336 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vkh2\" (UniqueName: \"kubernetes.io/projected/092b4cea-9783-446e-885d-894810464116-kube-api-access-8vkh2\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.275384 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-grpc\") pod 
\"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.275481 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-config\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.276445 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-config\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.276860 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/092b4cea-9783-446e-885d-894810464116-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.281551 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.297845 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/092b4cea-9783-446e-885d-894810464116-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.299539 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vkh2\" (UniqueName: \"kubernetes.io/projected/092b4cea-9783-446e-885d-894810464116-kube-api-access-8vkh2\") pod \"logging-loki-distributor-76cc67bf56-t8lbn\" (UID: \"092b4cea-9783-446e-885d-894810464116\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.323131 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.324010 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.325311 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.325389 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.325424 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.325899 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.325967 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.335148 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.336075 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.337941 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-m5lrf" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.352476 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.355755 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.372273 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376475 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376563 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376608 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv9nz\" (UniqueName: \"kubernetes.io/projected/3a141193-2b55-4706-af21-6a17b6f53ff1-kube-api-access-jv9nz\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376630 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-config\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376690 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-config\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376769 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376815 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376850 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z2gw\" (UniqueName: \"kubernetes.io/projected/429ef0ec-d210-48ef-98c2-0ba02186f05d-kube-api-access-9z2gw\") pod 
\"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376870 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376909 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.376926 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477871 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv9nz\" (UniqueName: \"kubernetes.io/projected/3a141193-2b55-4706-af21-6a17b6f53ff1-kube-api-access-jv9nz\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477912 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477937 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-config\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477955 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477974 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnd7b\" (UniqueName: \"kubernetes.io/projected/b0233b96-19b0-4b07-b935-ca38b3707646-kube-api-access-rnd7b\") pod 
\"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.477991 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-config\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478011 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478030 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478046 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478060 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478076 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z2gw\" (UniqueName: \"kubernetes.io/projected/429ef0ec-d210-48ef-98c2-0ba02186f05d-kube-api-access-9z2gw\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478095 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478109 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4zsc\" (UniqueName: \"kubernetes.io/projected/249a9969-3fdd-497d-bdda-3bea0481a4dd-kube-api-access-f4zsc\") pod 
\"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478138 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478154 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478170 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478193 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478208 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478227 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478247 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-lokistack-gateway\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478262 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-ca-bundle\") pod 
\"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478278 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478302 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478318 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478350 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478383 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-lokistack-gateway\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.478404 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.479786 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-config\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.479972 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " 
pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.480435 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a141193-2b55-4706-af21-6a17b6f53ff1-config\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.480872 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.485498 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.485527 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.485699 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.486976 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3a141193-2b55-4706-af21-6a17b6f53ff1-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.489786 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/429ef0ec-d210-48ef-98c2-0ba02186f05d-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.493025 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv9nz\" (UniqueName: \"kubernetes.io/projected/3a141193-2b55-4706-af21-6a17b6f53ff1-kube-api-access-jv9nz\") pod \"logging-loki-querier-5895d59bb8-pvl24\" (UID: \"3a141193-2b55-4706-af21-6a17b6f53ff1\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.495315 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z2gw\" (UniqueName: \"kubernetes.io/projected/429ef0ec-d210-48ef-98c2-0ba02186f05d-kube-api-access-9z2gw\") pod \"logging-loki-query-frontend-84558f7c9f-drm5l\" (UID: \"429ef0ec-d210-48ef-98c2-0ba02186f05d\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.515434 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.565239 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580155 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580217 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580240 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-lokistack-gateway\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580279 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580306 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580356 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580381 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-lokistack-gateway\") pod 
\"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580401 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580439 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580830 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnd7b\" (UniqueName: \"kubernetes.io/projected/b0233b96-19b0-4b07-b935-ca38b3707646-kube-api-access-rnd7b\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.580862 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.581436 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-lokistack-gateway\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.581442 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.582836 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.583119 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 
15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.583156 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4zsc\" (UniqueName: \"kubernetes.io/projected/249a9969-3fdd-497d-bdda-3bea0481a4dd-kube-api-access-f4zsc\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.583212 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.583291 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.583315 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.584211 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.584230 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.584336 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.584566 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-lokistack-gateway\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.585573 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: 
\"kubernetes.io/configmap/b0233b96-19b0-4b07-b935-ca38b3707646-rbac\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.587668 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.588936 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.589120 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tls-secret\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.589342 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b0233b96-19b0-4b07-b935-ca38b3707646-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.591719 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-tenants\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.602895 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/249a9969-3fdd-497d-bdda-3bea0481a4dd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.606279 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4zsc\" (UniqueName: \"kubernetes.io/projected/249a9969-3fdd-497d-bdda-3bea0481a4dd-kube-api-access-f4zsc\") pod \"logging-loki-gateway-7ff5bd49dc-bf46s\" (UID: \"249a9969-3fdd-497d-bdda-3bea0481a4dd\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.623272 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnd7b\" (UniqueName: \"kubernetes.io/projected/b0233b96-19b0-4b07-b935-ca38b3707646-kube-api-access-rnd7b\") pod \"logging-loki-gateway-7ff5bd49dc-xpkxx\" (UID: \"b0233b96-19b0-4b07-b935-ca38b3707646\") " pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 
09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.638831 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.647094 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.730653 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn"] Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.873684 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-pvl24"] Dec 09 15:08:38 crc kubenswrapper[4583]: W1209 15:08:38.876371 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a141193_2b55_4706_af21_6a17b6f53ff1.slice/crio-7e97276bbb2d38e77544a00d333f474257fad3ce36019116547858c91dc53d45 WatchSource:0}: Error finding container 7e97276bbb2d38e77544a00d333f474257fad3ce36019116547858c91dc53d45: Status 404 returned error can't find the container with id 7e97276bbb2d38e77544a00d333f474257fad3ce36019116547858c91dc53d45 Dec 09 15:08:38 crc kubenswrapper[4583]: I1209 15:08:38.960580 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l"] Dec 09 15:08:38 crc kubenswrapper[4583]: W1209 15:08:38.961680 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod429ef0ec_d210_48ef_98c2_0ba02186f05d.slice/crio-de52c0c7d5170f4d101c6f695bb1d8a68513202d7746f4243da882566f835e44 WatchSource:0}: Error finding container de52c0c7d5170f4d101c6f695bb1d8a68513202d7746f4243da882566f835e44: Status 404 returned error can't find the container with id de52c0c7d5170f4d101c6f695bb1d8a68513202d7746f4243da882566f835e44 Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.033851 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s"] Dec 09 15:08:39 crc kubenswrapper[4583]: W1209 15:08:39.036285 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod249a9969_3fdd_497d_bdda_3bea0481a4dd.slice/crio-449205c7cc2dacf81b92ac8ebb2eb121aad87f019158681dad2abcd394b53f2c WatchSource:0}: Error finding container 449205c7cc2dacf81b92ac8ebb2eb121aad87f019158681dad2abcd394b53f2c: Status 404 returned error can't find the container with id 449205c7cc2dacf81b92ac8ebb2eb121aad87f019158681dad2abcd394b53f2c Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.074489 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx"] Dec 09 15:08:39 crc kubenswrapper[4583]: W1209 15:08:39.076383 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0233b96_19b0_4b07_b935_ca38b3707646.slice/crio-dce88f51e526d378f27ece2ed8e0bc82cccab38a92fb1cc05458812e493bc79e WatchSource:0}: Error finding container dce88f51e526d378f27ece2ed8e0bc82cccab38a92fb1cc05458812e493bc79e: Status 404 returned error can't find the container with id dce88f51e526d378f27ece2ed8e0bc82cccab38a92fb1cc05458812e493bc79e Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.120248 4583 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" event={"ID":"249a9969-3fdd-497d-bdda-3bea0481a4dd","Type":"ContainerStarted","Data":"449205c7cc2dacf81b92ac8ebb2eb121aad87f019158681dad2abcd394b53f2c"} Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.121391 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" event={"ID":"b0233b96-19b0-4b07-b935-ca38b3707646","Type":"ContainerStarted","Data":"dce88f51e526d378f27ece2ed8e0bc82cccab38a92fb1cc05458812e493bc79e"} Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.122257 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" event={"ID":"3a141193-2b55-4706-af21-6a17b6f53ff1","Type":"ContainerStarted","Data":"7e97276bbb2d38e77544a00d333f474257fad3ce36019116547858c91dc53d45"} Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.123051 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" event={"ID":"092b4cea-9783-446e-885d-894810464116","Type":"ContainerStarted","Data":"f152a7364ba6835b5fd9e6e79c9eea4a0c4aace3f4a5261b91df71ed7e74acb3"} Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.123707 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" event={"ID":"429ef0ec-d210-48ef-98c2-0ba02186f05d","Type":"ContainerStarted","Data":"de52c0c7d5170f4d101c6f695bb1d8a68513202d7746f4243da882566f835e44"} Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.236064 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.236702 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.239019 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.239453 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.241208 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.241442 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.241462 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.241989 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.246437 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.251475 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.286007 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.286703 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.288817 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.289033 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.292690 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396074 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9xb9\" (UniqueName: \"kubernetes.io/projected/b1709e50-5632-47cd-93bf-2de3b7e53b06-kube-api-access-z9xb9\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396133 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396195 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396216 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: 
\"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396351 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396403 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396447 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396474 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396492 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396512 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396588 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qqvq\" (UniqueName: \"kubernetes.io/projected/cc853021-5a5e-4f66-ab41-d70e667fa5f5-kube-api-access-8qqvq\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396617 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\") pod 
\"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396649 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396666 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396685 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-config\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396783 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396809 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-config\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396829 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396844 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-config\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396868 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: 
I1209 15:08:39.396888 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts8jl\" (UniqueName: \"kubernetes.io/projected/272c488c-ff6e-4650-9e82-8d5d45913fb1-kube-api-access-ts8jl\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.396936 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498221 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9xb9\" (UniqueName: \"kubernetes.io/projected/b1709e50-5632-47cd-93bf-2de3b7e53b06-kube-api-access-z9xb9\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498443 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498473 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498491 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498519 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498540 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498560 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: 
\"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498576 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498592 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498609 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498635 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498651 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qqvq\" (UniqueName: \"kubernetes.io/projected/cc853021-5a5e-4f66-ab41-d70e667fa5f5-kube-api-access-8qqvq\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498671 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498687 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498707 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-config\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498728 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498756 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-config\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498775 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498791 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-config\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498808 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498825 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.498838 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts8jl\" (UniqueName: \"kubernetes.io/projected/272c488c-ff6e-4650-9e82-8d5d45913fb1-kube-api-access-ts8jl\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.499367 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.499870 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.500039 
4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1709e50-5632-47cd-93bf-2de3b7e53b06-config\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.500065 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc853021-5a5e-4f66-ab41-d70e667fa5f5-config\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.500846 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.500876 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272c488c-ff6e-4650-9e82-8d5d45913fb1-config\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.502316 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.502364 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a5b7abe8aeebc8723e6ecd20de43a8648fcc463fe0f62ea2ccb7f5170ea23cd9/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.503174 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.504257 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
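The "attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice..." messages in this section indicate that the kubevirt.io.hostpath-provisioner node plugin does not advertise the STAGE_UNSTAGE_VOLUME capability, so kubelet skips the NodeStageVolume ("MountDevice") step and publishes the PVC-backed volumes directly. The following Go sketch is illustrative only (it is not taken from this log or from the provisioner's source): it shows how a CSI node plugin would opt in to the staging step via NodeGetCapabilities; a driver that returns an empty capability list, as this one evidently does, produces exactly the "Skipping MountDevice" path logged above.

// Illustrative sketch of the CSI NodeGetCapabilities RPC that kubelet consults
// before deciding whether to run MountDevice (NodeStageVolume).
// Not the hostpath-provisioner's actual code.
package driver

import (
	"context"

	"github.com/container-storage-interface/spec/lib/go/csi"
)

type nodeServer struct{}

// Returning STAGE_UNSTAGE_VOLUME here makes kubelet call NodeStageVolume
// (a shared "device mount") before the per-pod NodePublishVolume. Returning
// no capabilities instead makes kubelet log
// "STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice..."
// and go straight to the publish mount, as seen in this log.
func (n *nodeServer) NodeGetCapabilities(ctx context.Context, req *csi.NodeGetCapabilitiesRequest) (*csi.NodeGetCapabilitiesResponse, error) {
	return &csi.NodeGetCapabilitiesResponse{
		Capabilities: []*csi.NodeServiceCapability{
			{
				Type: &csi.NodeServiceCapability_Rpc{
					Rpc: &csi.NodeServiceCapability_RPC{
						Type: csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME,
					},
				},
			},
		},
	}, nil
}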
Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.504288 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f77731aeee9c1952581ed7848212c20f7fab0c69591c12f672b614fdd4476ab2/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.504789 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.504814 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/404aabe92da881cbabaea6903c37326718dd2009a9c3313a7964fc1ff93ac839/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.505052 4583 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.505085 4583 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/92483021878e84b60decd7b9e25741a91984319f7cae7943251d3548fbfe7425/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.505648 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.505806 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.505833 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.506731 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.506902 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.506952 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/272c488c-ff6e-4650-9e82-8d5d45913fb1-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.507453 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b1709e50-5632-47cd-93bf-2de3b7e53b06-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.507653 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc853021-5a5e-4f66-ab41-d70e667fa5f5-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.513607 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9xb9\" (UniqueName: \"kubernetes.io/projected/b1709e50-5632-47cd-93bf-2de3b7e53b06-kube-api-access-z9xb9\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.514799 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts8jl\" (UniqueName: \"kubernetes.io/projected/272c488c-ff6e-4650-9e82-8d5d45913fb1-kube-api-access-ts8jl\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.518178 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qqvq\" (UniqueName: \"kubernetes.io/projected/cc853021-5a5e-4f66-ab41-d70e667fa5f5-kube-api-access-8qqvq\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.529160 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df05981e-f6dc-47ea-88a8-8183cdc69af1\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.530224 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c0d88558-4e3e-4572-89f4-19c9ec34305d\") pod \"logging-loki-compactor-0\" (UID: \"272c488c-ff6e-4650-9e82-8d5d45913fb1\") " pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.530389 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84f636d7-b458-4e8e-8ea1-5b03e02e62ad\") pod \"logging-loki-ingester-0\" (UID: \"cc853021-5a5e-4f66-ab41-d70e667fa5f5\") " pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.530723 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2cc2ad5b-7c8b-4ea3-970c-a17d97a615f3\") pod \"logging-loki-index-gateway-0\" (UID: \"b1709e50-5632-47cd-93bf-2de3b7e53b06\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.553163 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.559176 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.616654 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.899672 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.938123 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: W1209 15:08:39.940189 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc853021_5a5e_4f66_ab41_d70e667fa5f5.slice/crio-2d850fc66ccf305bf1f9f1b8ba510b072b52270739df1d0af2acfdb4124d13a2 WatchSource:0}: Error finding container 2d850fc66ccf305bf1f9f1b8ba510b072b52270739df1d0af2acfdb4124d13a2: Status 404 returned error can't find the container with id 2d850fc66ccf305bf1f9f1b8ba510b072b52270739df1d0af2acfdb4124d13a2 Dec 09 15:08:39 crc kubenswrapper[4583]: I1209 15:08:39.984849 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 09 15:08:39 crc kubenswrapper[4583]: W1209 15:08:39.987292 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1709e50_5632_47cd_93bf_2de3b7e53b06.slice/crio-b4cedce80170a1e2c34b8635260cc8601e87ce36db23a93dca21066caf194e15 WatchSource:0}: Error finding container b4cedce80170a1e2c34b8635260cc8601e87ce36db23a93dca21066caf194e15: Status 404 returned error can't find the container with id b4cedce80170a1e2c34b8635260cc8601e87ce36db23a93dca21066caf194e15 Dec 09 15:08:40 crc kubenswrapper[4583]: I1209 15:08:40.129104 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" 
event={"ID":"cc853021-5a5e-4f66-ab41-d70e667fa5f5","Type":"ContainerStarted","Data":"2d850fc66ccf305bf1f9f1b8ba510b072b52270739df1d0af2acfdb4124d13a2"} Dec 09 15:08:40 crc kubenswrapper[4583]: I1209 15:08:40.129790 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"b1709e50-5632-47cd-93bf-2de3b7e53b06","Type":"ContainerStarted","Data":"b4cedce80170a1e2c34b8635260cc8601e87ce36db23a93dca21066caf194e15"} Dec 09 15:08:40 crc kubenswrapper[4583]: I1209 15:08:40.130445 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"272c488c-ff6e-4650-9e82-8d5d45913fb1","Type":"ContainerStarted","Data":"6af0363bc7b417a324bb7eaa49e8c43a1d86b894745141100287c3ad38e49f77"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.155081 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" event={"ID":"b0233b96-19b0-4b07-b935-ca38b3707646","Type":"ContainerStarted","Data":"672a7e0443a38d83a1139b42ae6d15cf51440429004a3852c728e4d04eeb24fa"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.156372 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" event={"ID":"092b4cea-9783-446e-885d-894810464116","Type":"ContainerStarted","Data":"fabde67dded4cb4113850f1746e4185f6ef4bee93ee028ece4101a743df2c307"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.156494 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.157592 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"272c488c-ff6e-4650-9e82-8d5d45913fb1","Type":"ContainerStarted","Data":"87c639e6178c477da1f3a5c1db4ed2d434b5dbb7d4192de1f086bfe387381ab5"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.157727 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.158660 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" event={"ID":"3a141193-2b55-4706-af21-6a17b6f53ff1","Type":"ContainerStarted","Data":"732c0e4a881429c0e87dd309f58ddf744e8f4c40e0fc88e2b6fbf7fc31a4a3a4"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.158776 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.159969 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"cc853021-5a5e-4f66-ab41-d70e667fa5f5","Type":"ContainerStarted","Data":"7cb7e3a08f2d61eba90ff12098bae9547f492299868f2cecfe4f4dd290f35e52"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.160060 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.161658 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" event={"ID":"429ef0ec-d210-48ef-98c2-0ba02186f05d","Type":"ContainerStarted","Data":"803f5b52576f4c4584e32458a4e5e933b453a7f77fe008a6b2ab792ab4751d8e"} Dec 09 15:08:43 crc 
kubenswrapper[4583]: I1209 15:08:43.161787 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.163235 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" event={"ID":"249a9969-3fdd-497d-bdda-3bea0481a4dd","Type":"ContainerStarted","Data":"dc3fdddbf6bc1671177d29651ad3c0d96b80bd368b2b601a09aaf71bdfb14dff"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.164389 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"b1709e50-5632-47cd-93bf-2de3b7e53b06","Type":"ContainerStarted","Data":"7947779da3881cc48991f1ba9f0c5bc84d8e98564f6a3b95673a88b647c801c2"} Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.164806 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.205399 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" podStartSLOduration=1.624022524 podStartE2EDuration="5.205383152s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:38.878259262 +0000 UTC m=+617.085708340" lastFinishedPulling="2025-12-09 15:08:42.45961988 +0000 UTC m=+620.667068968" observedRunningTime="2025-12-09 15:08:43.203334941 +0000 UTC m=+621.410784029" watchObservedRunningTime="2025-12-09 15:08:43.205383152 +0000 UTC m=+621.412832240" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.206322 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" podStartSLOduration=1.493661256 podStartE2EDuration="5.206317729s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:38.745046643 +0000 UTC m=+616.952495731" lastFinishedPulling="2025-12-09 15:08:42.457703116 +0000 UTC m=+620.665152204" observedRunningTime="2025-12-09 15:08:43.183775427 +0000 UTC m=+621.391224515" watchObservedRunningTime="2025-12-09 15:08:43.206317729 +0000 UTC m=+621.413766817" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.226267 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" podStartSLOduration=2.737068153 podStartE2EDuration="5.226254231s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:39.989201648 +0000 UTC m=+618.196650736" lastFinishedPulling="2025-12-09 15:08:42.478387725 +0000 UTC m=+620.685836814" observedRunningTime="2025-12-09 15:08:43.22524339 +0000 UTC m=+621.432692478" watchObservedRunningTime="2025-12-09 15:08:43.226254231 +0000 UTC m=+621.433703319" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.259501 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=2.7445957659999998 podStartE2EDuration="5.259483755s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:39.942074886 +0000 UTC m=+618.149523974" lastFinishedPulling="2025-12-09 15:08:42.456962875 +0000 UTC m=+620.664411963" observedRunningTime="2025-12-09 15:08:43.240762387 +0000 UTC m=+621.448211475" watchObservedRunningTime="2025-12-09 15:08:43.259483755 +0000 UTC 
m=+621.466932843" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.269666 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=2.738319127 podStartE2EDuration="5.26965089s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:39.905835554 +0000 UTC m=+618.113284642" lastFinishedPulling="2025-12-09 15:08:42.437167328 +0000 UTC m=+620.644616405" observedRunningTime="2025-12-09 15:08:43.265817573 +0000 UTC m=+621.473266661" watchObservedRunningTime="2025-12-09 15:08:43.26965089 +0000 UTC m=+621.477099977" Dec 09 15:08:43 crc kubenswrapper[4583]: I1209 15:08:43.280919 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" podStartSLOduration=1.7875560579999998 podStartE2EDuration="5.280910808s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:38.963579241 +0000 UTC m=+617.171028329" lastFinishedPulling="2025-12-09 15:08:42.456933991 +0000 UTC m=+620.664383079" observedRunningTime="2025-12-09 15:08:43.278140641 +0000 UTC m=+621.485589729" watchObservedRunningTime="2025-12-09 15:08:43.280910808 +0000 UTC m=+621.488359897" Dec 09 15:08:45 crc kubenswrapper[4583]: I1209 15:08:45.175939 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" event={"ID":"b0233b96-19b0-4b07-b935-ca38b3707646","Type":"ContainerStarted","Data":"eeb9c22c6fdc6d88a62dad12be6a852ea5822ac724ef80b27cc8c7764a999fa5"} Dec 09 15:08:45 crc kubenswrapper[4583]: I1209 15:08:45.176622 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:45 crc kubenswrapper[4583]: I1209 15:08:45.185210 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:45 crc kubenswrapper[4583]: I1209 15:08:45.191029 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" podStartSLOduration=1.6594740140000002 podStartE2EDuration="7.191013696s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:39.078152938 +0000 UTC m=+617.285602026" lastFinishedPulling="2025-12-09 15:08:44.609692619 +0000 UTC m=+622.817141708" observedRunningTime="2025-12-09 15:08:45.188261954 +0000 UTC m=+623.395711042" watchObservedRunningTime="2025-12-09 15:08:45.191013696 +0000 UTC m=+623.398462785" Dec 09 15:08:46 crc kubenswrapper[4583]: I1209 15:08:46.180249 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:46 crc kubenswrapper[4583]: I1209 15:08:46.185599 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-xpkxx" Dec 09 15:08:50 crc kubenswrapper[4583]: I1209 15:08:50.198640 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" event={"ID":"249a9969-3fdd-497d-bdda-3bea0481a4dd","Type":"ContainerStarted","Data":"01361ea36aeb0066be4ffb7e8fab826bded56abc71668acfceb14a1a4e9f00fb"} Dec 09 15:08:50 crc kubenswrapper[4583]: I1209 15:08:50.199596 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:50 crc kubenswrapper[4583]: I1209 15:08:50.205816 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:50 crc kubenswrapper[4583]: I1209 15:08:50.213392 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" podStartSLOduration=1.659987211 podStartE2EDuration="12.213382709s" podCreationTimestamp="2025-12-09 15:08:38 +0000 UTC" firstStartedPulling="2025-12-09 15:08:39.038024594 +0000 UTC m=+617.245473681" lastFinishedPulling="2025-12-09 15:08:49.591420092 +0000 UTC m=+627.798869179" observedRunningTime="2025-12-09 15:08:50.211412615 +0000 UTC m=+628.418861703" watchObservedRunningTime="2025-12-09 15:08:50.213382709 +0000 UTC m=+628.420831787" Dec 09 15:08:51 crc kubenswrapper[4583]: I1209 15:08:51.202734 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:51 crc kubenswrapper[4583]: I1209 15:08:51.209836 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-7ff5bd49dc-bf46s" Dec 09 15:08:58 crc kubenswrapper[4583]: I1209 15:08:58.377677 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-76cc67bf56-t8lbn" Dec 09 15:08:58 crc kubenswrapper[4583]: I1209 15:08:58.520001 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-5895d59bb8-pvl24" Dec 09 15:08:58 crc kubenswrapper[4583]: I1209 15:08:58.570940 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-drm5l" Dec 09 15:08:59 crc kubenswrapper[4583]: I1209 15:08:59.557958 4583 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 09 15:08:59 crc kubenswrapper[4583]: I1209 15:08:59.558157 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc853021-5a5e-4f66-ab41-d70e667fa5f5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 09 15:08:59 crc kubenswrapper[4583]: I1209 15:08:59.564818 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Dec 09 15:08:59 crc kubenswrapper[4583]: I1209 15:08:59.622186 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Dec 09 15:09:09 crc kubenswrapper[4583]: I1209 15:09:09.556538 4583 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 09 15:09:09 crc kubenswrapper[4583]: I1209 15:09:09.557007 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc853021-5a5e-4f66-ab41-d70e667fa5f5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" 
Dec 09 15:09:19 crc kubenswrapper[4583]: I1209 15:09:19.556498 4583 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 09 15:09:19 crc kubenswrapper[4583]: I1209 15:09:19.556941 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc853021-5a5e-4f66-ab41-d70e667fa5f5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 09 15:09:29 crc kubenswrapper[4583]: I1209 15:09:29.556877 4583 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 09 15:09:29 crc kubenswrapper[4583]: I1209 15:09:29.557284 4583 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc853021-5a5e-4f66-ab41-d70e667fa5f5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 09 15:09:39 crc kubenswrapper[4583]: I1209 15:09:39.556763 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.862149 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-jrxqj"] Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.863225 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865027 4583 reflector.go:561] object-"openshift-logging"/"collector-metrics": failed to list *v1.Secret: secrets "collector-metrics" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865069 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-metrics\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"collector-metrics\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865027 4583 reflector.go:561] object-"openshift-logging"/"collector-token": failed to list *v1.Secret: secrets "collector-token" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865094 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-token\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"collector-token\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865172 4583 reflector.go:561] 
object-"openshift-logging"/"collector-syslog-receiver": failed to list *v1.Secret: secrets "collector-syslog-receiver" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865222 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-syslog-receiver\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"collector-syslog-receiver\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865286 4583 reflector.go:561] object-"openshift-logging"/"collector-config": failed to list *v1.ConfigMap: configmaps "collector-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865298 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"collector-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865292 4583 reflector.go:561] object-"openshift-logging"/"collector-dockercfg-vdscv": failed to list *v1.Secret: secrets "collector-dockercfg-vdscv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865321 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-dockercfg-vdscv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"collector-dockercfg-vdscv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: W1209 15:09:57.865828 4583 reflector.go:561] object-"openshift-logging"/"collector-trustbundle": failed to list *v1.ConfigMap: configmaps "collector-trustbundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.865851 4583 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"collector-trustbundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"collector-trustbundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.876836 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-jrxqj"] Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 
15:09:57.911677 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-jrxqj"] Dec 09 15:09:57 crc kubenswrapper[4583]: E1209 15:09:57.912060 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-k7xr5 metrics sa-token tmp trusted-ca], unattached volumes=[], failed to process volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-k7xr5 metrics sa-token tmp trusted-ca]: context canceled" pod="openshift-logging/collector-jrxqj" podUID="ca41b43f-1881-41fd-9336-34c8bb82a3bb" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963705 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963756 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963796 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7xr5\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963814 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963828 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963848 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963864 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-token\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.963930 4583 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.964047 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-entrypoint\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.964085 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:57 crc kubenswrapper[4583]: I1209 15:09:57.964115 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-metrics\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065350 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7xr5\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065395 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065416 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065443 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065459 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-token\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065474 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca\") 
pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065522 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-entrypoint\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065547 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065569 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-metrics\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065611 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065630 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.065956 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.066631 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.069987 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.080086 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.080340 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7xr5\" (UniqueName: 
\"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.490507 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.496851 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571415 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571469 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir" (OuterVolumeSpecName: "datadir") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571535 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571584 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7xr5\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571620 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571639 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.571925 4583 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/ca41b43f-1881-41fd-9336-34c8bb82a3bb-datadir\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.572335 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "config-openshift-service-cacrt". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.574169 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token" (OuterVolumeSpecName: "sa-token") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.574190 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5" (OuterVolumeSpecName: "kube-api-access-k7xr5") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "kube-api-access-k7xr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.574241 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp" (OuterVolumeSpecName: "tmp") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "tmp". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.672654 4583 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ca41b43f-1881-41fd-9336-34c8bb82a3bb-tmp\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.672863 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7xr5\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-kube-api-access-k7xr5\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.672875 4583 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/ca41b43f-1881-41fd-9336-34c8bb82a3bb-sa-token\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.672884 4583 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.890738 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.899142 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.975541 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.977462 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "collector-syslog-receiver". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.978024 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 09 15:09:58 crc kubenswrapper[4583]: I1209 15:09:58.986914 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca\") pod \"collector-jrxqj\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " pod="openshift-logging/collector-jrxqj" Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066670 4583 configmap.go:193] Couldn't get configMap openshift-logging/collector-config: failed to sync configmap cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066718 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-entrypoint podName:ca41b43f-1881-41fd-9336-34c8bb82a3bb nodeName:}" failed. No retries permitted until 2025-12-09 15:09:59.566705256 +0000 UTC m=+697.774154345 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "entrypoint" (UniqueName: "kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-entrypoint") pod "collector-jrxqj" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb") : failed to sync configmap cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066761 4583 configmap.go:193] Couldn't get configMap openshift-logging/collector-config: failed to sync configmap cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066807 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config podName:ca41b43f-1881-41fd-9336-34c8bb82a3bb nodeName:}" failed. No retries permitted until 2025-12-09 15:09:59.566792812 +0000 UTC m=+697.774241910 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config") pod "collector-jrxqj" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb") : failed to sync configmap cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066838 4583 secret.go:188] Couldn't get secret openshift-logging/collector-token: failed to sync secret cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.066912 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-token podName:ca41b43f-1881-41fd-9336-34c8bb82a3bb nodeName:}" failed. No retries permitted until 2025-12-09 15:09:59.566895866 +0000 UTC m=+697.774344954 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "collector-token" (UniqueName: "kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-token") pod "collector-jrxqj" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb") : failed to sync secret cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.067020 4583 secret.go:188] Couldn't get secret openshift-logging/collector-metrics: failed to sync secret cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: E1209 15:09:59.067111 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-metrics podName:ca41b43f-1881-41fd-9336-34c8bb82a3bb nodeName:}" failed. No retries permitted until 2025-12-09 15:09:59.567101132 +0000 UTC m=+697.774550220 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics" (UniqueName: "kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-metrics") pod "collector-jrxqj" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb") : failed to sync secret cache: timed out waiting for the condition Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.076109 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca\") pod \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\" (UID: \"ca41b43f-1881-41fd-9336-34c8bb82a3bb\") " Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.076313 4583 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.076462 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ca41b43f-1881-41fd-9336-34c8bb82a3bb" (UID: "ca41b43f-1881-41fd-9336-34c8bb82a3bb"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.088474 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.176651 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.199011 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.229364 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.231959 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-vdscv" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.391174 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.391230 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.494735 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-jrxqj" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.522357 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-jrxqj"] Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.525327 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-jrxqj"] Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.531427 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-bmpbq"] Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.532107 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.534551 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-vdscv" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.534815 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.534813 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.535503 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.535875 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.539620 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.541937 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-bmpbq"] Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.580761 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e622300e-10ba-43dc-92aa-53181f0be346-datadir\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.580802 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config-openshift-service-cacrt\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.580840 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-metrics\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.580866 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-trusted-ca\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.580968 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r72fp\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-kube-api-access-r72fp\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581008 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-sa-token\") pod \"collector-bmpbq\" (UID: 
\"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581049 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-token\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581089 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-entrypoint\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581114 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581127 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e622300e-10ba-43dc-92aa-53181f0be346-tmp\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581140 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-syslog-receiver\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581194 4583 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581213 4583 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-collector-token\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581224 4583 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/ca41b43f-1881-41fd-9336-34c8bb82a3bb-entrypoint\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.581232 4583 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/ca41b43f-1881-41fd-9336-34c8bb82a3bb-metrics\") on node \"crc\" DevicePath \"\"" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682535 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-trusted-ca\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682587 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-r72fp\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-kube-api-access-r72fp\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682615 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-sa-token\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682647 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-token\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682670 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-entrypoint\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682690 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682704 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e622300e-10ba-43dc-92aa-53181f0be346-tmp\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682719 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-syslog-receiver\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682731 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e622300e-10ba-43dc-92aa-53181f0be346-datadir\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682770 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config-openshift-service-cacrt\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.682796 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-metrics\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 
15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.683097 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e622300e-10ba-43dc-92aa-53181f0be346-datadir\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.683645 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-trusted-ca\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.684013 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.684144 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-entrypoint\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.684413 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e622300e-10ba-43dc-92aa-53181f0be346-config-openshift-service-cacrt\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.685839 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-syslog-receiver\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.685917 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-metrics\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.686096 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e622300e-10ba-43dc-92aa-53181f0be346-tmp\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.686365 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e622300e-10ba-43dc-92aa-53181f0be346-collector-token\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.695827 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r72fp\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-kube-api-access-r72fp\") pod \"collector-bmpbq\" (UID: 
\"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.696058 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e622300e-10ba-43dc-92aa-53181f0be346-sa-token\") pod \"collector-bmpbq\" (UID: \"e622300e-10ba-43dc-92aa-53181f0be346\") " pod="openshift-logging/collector-bmpbq" Dec 09 15:09:59 crc kubenswrapper[4583]: I1209 15:09:59.847199 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-bmpbq" Dec 09 15:10:00 crc kubenswrapper[4583]: I1209 15:10:00.084545 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca41b43f-1881-41fd-9336-34c8bb82a3bb" path="/var/lib/kubelet/pods/ca41b43f-1881-41fd-9336-34c8bb82a3bb/volumes" Dec 09 15:10:00 crc kubenswrapper[4583]: I1209 15:10:00.184343 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-bmpbq"] Dec 09 15:10:00 crc kubenswrapper[4583]: I1209 15:10:00.499361 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-bmpbq" event={"ID":"e622300e-10ba-43dc-92aa-53181f0be346","Type":"ContainerStarted","Data":"ebda1692382a42d25221ea7717b9b4b6d2f929901ccdbe9e23fe42dd41222802"} Dec 09 15:10:08 crc kubenswrapper[4583]: I1209 15:10:08.532987 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-bmpbq" event={"ID":"e622300e-10ba-43dc-92aa-53181f0be346","Type":"ContainerStarted","Data":"da27af14afa83f5de00a93157204a6c805a3b3a6111805ebc9015f76626ae074"} Dec 09 15:10:08 crc kubenswrapper[4583]: I1209 15:10:08.548319 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-bmpbq" podStartSLOduration=1.9080594720000001 podStartE2EDuration="9.548306988s" podCreationTimestamp="2025-12-09 15:09:59 +0000 UTC" firstStartedPulling="2025-12-09 15:10:00.189771111 +0000 UTC m=+698.397220199" lastFinishedPulling="2025-12-09 15:10:07.830018627 +0000 UTC m=+706.037467715" observedRunningTime="2025-12-09 15:10:08.54572356 +0000 UTC m=+706.753172649" watchObservedRunningTime="2025-12-09 15:10:08.548306988 +0000 UTC m=+706.755756076" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.118505 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh"] Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.120364 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.121846 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.125945 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh"] Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.262860 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7qjt\" (UniqueName: \"kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.263342 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.263453 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.363946 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.363985 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.364024 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7qjt\" (UniqueName: \"kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.364412 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.364462 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.378883 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7qjt\" (UniqueName: \"kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.434548 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:26 crc kubenswrapper[4583]: I1209 15:10:26.775520 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh"] Dec 09 15:10:27 crc kubenswrapper[4583]: I1209 15:10:27.615562 4583 generic.go:334] "Generic (PLEG): container finished" podID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerID="58665aa95bbfd8874d3d12afd764277907137cf88d21aebd0412364bfa3ac503" exitCode=0 Dec 09 15:10:27 crc kubenswrapper[4583]: I1209 15:10:27.615602 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" event={"ID":"665e4cac-0f3c-48ee-a078-92ced9aaae38","Type":"ContainerDied","Data":"58665aa95bbfd8874d3d12afd764277907137cf88d21aebd0412364bfa3ac503"} Dec 09 15:10:27 crc kubenswrapper[4583]: I1209 15:10:27.615626 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" event={"ID":"665e4cac-0f3c-48ee-a078-92ced9aaae38","Type":"ContainerStarted","Data":"6de56068bea6fb0f45127e01d6bedbc95fa673da9e5ce35136c19dde63cee6a5"} Dec 09 15:10:29 crc kubenswrapper[4583]: I1209 15:10:29.392078 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:10:29 crc kubenswrapper[4583]: I1209 15:10:29.392685 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:10:29 crc kubenswrapper[4583]: I1209 15:10:29.625581 4583 generic.go:334] "Generic (PLEG): container finished" podID="665e4cac-0f3c-48ee-a078-92ced9aaae38" 
containerID="844204364cffb5b13c7c0c1bb5cd2c2fd82fefde69b9cf7c1133edba1a45f84e" exitCode=0 Dec 09 15:10:29 crc kubenswrapper[4583]: I1209 15:10:29.625626 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" event={"ID":"665e4cac-0f3c-48ee-a078-92ced9aaae38","Type":"ContainerDied","Data":"844204364cffb5b13c7c0c1bb5cd2c2fd82fefde69b9cf7c1133edba1a45f84e"} Dec 09 15:10:30 crc kubenswrapper[4583]: I1209 15:10:30.631810 4583 generic.go:334] "Generic (PLEG): container finished" podID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerID="39cd262e6cebac841aefbf158b7105f5c5fb4f12cda02e1344d0c740d073f06f" exitCode=0 Dec 09 15:10:30 crc kubenswrapper[4583]: I1209 15:10:30.631848 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" event={"ID":"665e4cac-0f3c-48ee-a078-92ced9aaae38","Type":"ContainerDied","Data":"39cd262e6cebac841aefbf158b7105f5c5fb4f12cda02e1344d0c740d073f06f"} Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.820313 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.824520 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle\") pod \"665e4cac-0f3c-48ee-a078-92ced9aaae38\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.824580 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7qjt\" (UniqueName: \"kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt\") pod \"665e4cac-0f3c-48ee-a078-92ced9aaae38\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.824630 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util\") pod \"665e4cac-0f3c-48ee-a078-92ced9aaae38\" (UID: \"665e4cac-0f3c-48ee-a078-92ced9aaae38\") " Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.825798 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle" (OuterVolumeSpecName: "bundle") pod "665e4cac-0f3c-48ee-a078-92ced9aaae38" (UID: "665e4cac-0f3c-48ee-a078-92ced9aaae38"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.828958 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt" (OuterVolumeSpecName: "kube-api-access-f7qjt") pod "665e4cac-0f3c-48ee-a078-92ced9aaae38" (UID: "665e4cac-0f3c-48ee-a078-92ced9aaae38"). InnerVolumeSpecName "kube-api-access-f7qjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.835900 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util" (OuterVolumeSpecName: "util") pod "665e4cac-0f3c-48ee-a078-92ced9aaae38" (UID: "665e4cac-0f3c-48ee-a078-92ced9aaae38"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.925927 4583 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.925958 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7qjt\" (UniqueName: \"kubernetes.io/projected/665e4cac-0f3c-48ee-a078-92ced9aaae38-kube-api-access-f7qjt\") on node \"crc\" DevicePath \"\"" Dec 09 15:10:31 crc kubenswrapper[4583]: I1209 15:10:31.925969 4583 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665e4cac-0f3c-48ee-a078-92ced9aaae38-util\") on node \"crc\" DevicePath \"\"" Dec 09 15:10:32 crc kubenswrapper[4583]: I1209 15:10:32.642164 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" event={"ID":"665e4cac-0f3c-48ee-a078-92ced9aaae38","Type":"ContainerDied","Data":"6de56068bea6fb0f45127e01d6bedbc95fa673da9e5ce35136c19dde63cee6a5"} Dec 09 15:10:32 crc kubenswrapper[4583]: I1209 15:10:32.642367 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6de56068bea6fb0f45127e01d6bedbc95fa673da9e5ce35136c19dde63cee6a5" Dec 09 15:10:32 crc kubenswrapper[4583]: I1209 15:10:32.642211 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.550556 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7"] Dec 09 15:10:34 crc kubenswrapper[4583]: E1209 15:10:34.550739 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="pull" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.550762 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="pull" Dec 09 15:10:34 crc kubenswrapper[4583]: E1209 15:10:34.550773 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="util" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.550779 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="util" Dec 09 15:10:34 crc kubenswrapper[4583]: E1209 15:10:34.550785 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="extract" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.550790 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="extract" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.550876 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="665e4cac-0f3c-48ee-a078-92ced9aaae38" containerName="extract" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.551235 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.552531 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-mn525" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.552650 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.552803 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.560146 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7"] Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.653032 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rwfm\" (UniqueName: \"kubernetes.io/projected/dffc95f7-da8f-4b23-b2b5-0b30e9233a08-kube-api-access-5rwfm\") pod \"nmstate-operator-5b5b58f5c8-hkrd7\" (UID: \"dffc95f7-da8f-4b23-b2b5-0b30e9233a08\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.754091 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rwfm\" (UniqueName: \"kubernetes.io/projected/dffc95f7-da8f-4b23-b2b5-0b30e9233a08-kube-api-access-5rwfm\") pod \"nmstate-operator-5b5b58f5c8-hkrd7\" (UID: \"dffc95f7-da8f-4b23-b2b5-0b30e9233a08\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.768466 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rwfm\" (UniqueName: \"kubernetes.io/projected/dffc95f7-da8f-4b23-b2b5-0b30e9233a08-kube-api-access-5rwfm\") pod \"nmstate-operator-5b5b58f5c8-hkrd7\" (UID: \"dffc95f7-da8f-4b23-b2b5-0b30e9233a08\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" Dec 09 15:10:34 crc kubenswrapper[4583]: I1209 15:10:34.864902 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" Dec 09 15:10:35 crc kubenswrapper[4583]: I1209 15:10:35.202768 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7"] Dec 09 15:10:35 crc kubenswrapper[4583]: I1209 15:10:35.654371 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" event={"ID":"dffc95f7-da8f-4b23-b2b5-0b30e9233a08","Type":"ContainerStarted","Data":"fda18aa2497fd067f6fb2e4a9c666bdef2b0d3dc1b834ef39c79b8212460bf89"} Dec 09 15:10:37 crc kubenswrapper[4583]: I1209 15:10:37.665044 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" event={"ID":"dffc95f7-da8f-4b23-b2b5-0b30e9233a08","Type":"ContainerStarted","Data":"e5fd7d300ab8148257cd0b1ba5fc444d96bfe0c882f3a4296f5e77c28bbb7e6c"} Dec 09 15:10:37 crc kubenswrapper[4583]: I1209 15:10:37.678082 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hkrd7" podStartSLOduration=1.97636429 podStartE2EDuration="3.678067428s" podCreationTimestamp="2025-12-09 15:10:34 +0000 UTC" firstStartedPulling="2025-12-09 15:10:35.20801699 +0000 UTC m=+733.415466079" lastFinishedPulling="2025-12-09 15:10:36.90972013 +0000 UTC m=+735.117169217" observedRunningTime="2025-12-09 15:10:37.674557428 +0000 UTC m=+735.882006515" watchObservedRunningTime="2025-12-09 15:10:37.678067428 +0000 UTC m=+735.885516516" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.397592 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.398818 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.400360 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-9fvfv" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.409156 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.409884 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.411331 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.413853 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.416654 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ngtg2"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.421628 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.448068 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500262 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-nmstate-lock\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500326 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-ovs-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500367 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fm8f\" (UniqueName: \"kubernetes.io/projected/e20829fc-8615-47ac-8a52-00bdad76e975-kube-api-access-2fm8f\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500407 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8kw9\" (UniqueName: \"kubernetes.io/projected/ae1b2371-96e9-4b20-b9e5-df312ff49a42-kube-api-access-b8kw9\") pod \"nmstate-metrics-7f946cbc9-stxfm\" (UID: \"ae1b2371-96e9-4b20-b9e5-df312ff49a42\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500421 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-dbus-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500443 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.500494 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pgmj\" (UniqueName: \"kubernetes.io/projected/94f96eaa-6548-4dbf-bcb6-50c416335322-kube-api-access-5pgmj\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.528383 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.529018 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.532066 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.532264 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.532392 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-74vx9" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.547447 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601489 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pgmj\" (UniqueName: \"kubernetes.io/projected/94f96eaa-6548-4dbf-bcb6-50c416335322-kube-api-access-5pgmj\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601561 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-nmstate-lock\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601586 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-ovs-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601613 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/457b330f-11f8-4f32-a2df-86568457b8bd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601641 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntsfv\" (UniqueName: \"kubernetes.io/projected/457b330f-11f8-4f32-a2df-86568457b8bd-kube-api-access-ntsfv\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601655 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-nmstate-lock\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601665 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fm8f\" (UniqueName: \"kubernetes.io/projected/e20829fc-8615-47ac-8a52-00bdad76e975-kube-api-access-2fm8f\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601730 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-ovs-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601784 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8kw9\" (UniqueName: \"kubernetes.io/projected/ae1b2371-96e9-4b20-b9e5-df312ff49a42-kube-api-access-b8kw9\") pod \"nmstate-metrics-7f946cbc9-stxfm\" (UID: \"ae1b2371-96e9-4b20-b9e5-df312ff49a42\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601807 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-dbus-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601837 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.601906 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/457b330f-11f8-4f32-a2df-86568457b8bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: E1209 15:10:38.602035 4583 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 09 15:10:38 crc kubenswrapper[4583]: E1209 15:10:38.602085 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair podName:e20829fc-8615-47ac-8a52-00bdad76e975 nodeName:}" failed. No retries permitted until 2025-12-09 15:10:39.102069463 +0000 UTC m=+737.309518551 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-m4rct" (UID: "e20829fc-8615-47ac-8a52-00bdad76e975") : secret "openshift-nmstate-webhook" not found Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.602040 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/94f96eaa-6548-4dbf-bcb6-50c416335322-dbus-socket\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.616370 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fm8f\" (UniqueName: \"kubernetes.io/projected/e20829fc-8615-47ac-8a52-00bdad76e975-kube-api-access-2fm8f\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.616767 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8kw9\" (UniqueName: \"kubernetes.io/projected/ae1b2371-96e9-4b20-b9e5-df312ff49a42-kube-api-access-b8kw9\") pod \"nmstate-metrics-7f946cbc9-stxfm\" (UID: \"ae1b2371-96e9-4b20-b9e5-df312ff49a42\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.617038 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pgmj\" (UniqueName: \"kubernetes.io/projected/94f96eaa-6548-4dbf-bcb6-50c416335322-kube-api-access-5pgmj\") pod \"nmstate-handler-ngtg2\" (UID: \"94f96eaa-6548-4dbf-bcb6-50c416335322\") " pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.702329 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntsfv\" (UniqueName: \"kubernetes.io/projected/457b330f-11f8-4f32-a2df-86568457b8bd-kube-api-access-ntsfv\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.702402 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/457b330f-11f8-4f32-a2df-86568457b8bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.702461 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/457b330f-11f8-4f32-a2df-86568457b8bd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.703218 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/457b330f-11f8-4f32-a2df-86568457b8bd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc 
kubenswrapper[4583]: I1209 15:10:38.704562 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-c45d9bb8b-tsv5t"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.705468 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.705790 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/457b330f-11f8-4f32-a2df-86568457b8bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.711725 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.720482 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c45d9bb8b-tsv5t"] Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.733326 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntsfv\" (UniqueName: \"kubernetes.io/projected/457b330f-11f8-4f32-a2df-86568457b8bd-kube-api-access-ntsfv\") pod \"nmstate-console-plugin-7fbb5f6569-tjcbp\" (UID: \"457b330f-11f8-4f32-a2df-86568457b8bd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.747312 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:38 crc kubenswrapper[4583]: W1209 15:10:38.774943 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94f96eaa_6548_4dbf_bcb6_50c416335322.slice/crio-fd0be0e542fc098d798245b98588437cf5dc8aa099afdb6c170d8f931f27504d WatchSource:0}: Error finding container fd0be0e542fc098d798245b98588437cf5dc8aa099afdb6c170d8f931f27504d: Status 404 returned error can't find the container with id fd0be0e542fc098d798245b98588437cf5dc8aa099afdb6c170d8f931f27504d Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.855002 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906451 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-oauth-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906526 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4rdv\" (UniqueName: \"kubernetes.io/projected/be69fee0-5459-452a-b8e9-ba010ea680b4-kube-api-access-c4rdv\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906580 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906615 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-console-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906635 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-service-ca\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906657 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-oauth-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:38 crc kubenswrapper[4583]: I1209 15:10:38.906675 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-trusted-ca-bundle\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.007317 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.007517 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-console-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.007540 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-service-ca\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.008157 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-oauth-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.008212 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-trusted-ca-bundle\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.008237 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-oauth-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.008257 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4rdv\" (UniqueName: \"kubernetes.io/projected/be69fee0-5459-452a-b8e9-ba010ea680b4-kube-api-access-c4rdv\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.008397 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-console-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.009142 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-service-ca\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.009150 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-oauth-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.009438 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/be69fee0-5459-452a-b8e9-ba010ea680b4-trusted-ca-bundle\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.011815 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-serving-cert\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.012588 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/be69fee0-5459-452a-b8e9-ba010ea680b4-console-oauth-config\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.021257 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4rdv\" (UniqueName: \"kubernetes.io/projected/be69fee0-5459-452a-b8e9-ba010ea680b4-kube-api-access-c4rdv\") pod \"console-c45d9bb8b-tsv5t\" (UID: \"be69fee0-5459-452a-b8e9-ba010ea680b4\") " pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.074030 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.090926 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm"] Dec 09 15:10:39 crc kubenswrapper[4583]: W1209 15:10:39.094660 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae1b2371_96e9_4b20_b9e5_df312ff49a42.slice/crio-52559925e169886f9b6ec0dbd3abf81799f61f1ee48c2f459f184f317c18116b WatchSource:0}: Error finding container 52559925e169886f9b6ec0dbd3abf81799f61f1ee48c2f459f184f317c18116b: Status 404 returned error can't find the container with id 52559925e169886f9b6ec0dbd3abf81799f61f1ee48c2f459f184f317c18116b Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.109067 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.111484 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e20829fc-8615-47ac-8a52-00bdad76e975-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-m4rct\" (UID: \"e20829fc-8615-47ac-8a52-00bdad76e975\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.197453 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp"] Dec 09 15:10:39 crc kubenswrapper[4583]: W1209 15:10:39.201091 4583 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod457b330f_11f8_4f32_a2df_86568457b8bd.slice/crio-3a54cfc645867afe63732589d5fd06b2a4172c988a5aceb9f829686ba4aaa822 WatchSource:0}: Error finding container 3a54cfc645867afe63732589d5fd06b2a4172c988a5aceb9f829686ba4aaa822: Status 404 returned error can't find the container with id 3a54cfc645867afe63732589d5fd06b2a4172c988a5aceb9f829686ba4aaa822 Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.322186 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.409031 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c45d9bb8b-tsv5t"] Dec 09 15:10:39 crc kubenswrapper[4583]: W1209 15:10:39.415246 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe69fee0_5459_452a_b8e9_ba010ea680b4.slice/crio-70ef7a4b189f411bdd4f8b7b5d46d7ec4a2ee9192c3545f19235ec2f27ee0af8 WatchSource:0}: Error finding container 70ef7a4b189f411bdd4f8b7b5d46d7ec4a2ee9192c3545f19235ec2f27ee0af8: Status 404 returned error can't find the container with id 70ef7a4b189f411bdd4f8b7b5d46d7ec4a2ee9192c3545f19235ec2f27ee0af8 Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.658187 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct"] Dec 09 15:10:39 crc kubenswrapper[4583]: W1209 15:10:39.662091 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode20829fc_8615_47ac_8a52_00bdad76e975.slice/crio-54ceb77b3fab1defe31b746614fbcf3daeb6a980e2e4b9ae380c35d31666d1e0 WatchSource:0}: Error finding container 54ceb77b3fab1defe31b746614fbcf3daeb6a980e2e4b9ae380c35d31666d1e0: Status 404 returned error can't find the container with id 54ceb77b3fab1defe31b746614fbcf3daeb6a980e2e4b9ae380c35d31666d1e0 Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.673989 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" event={"ID":"e20829fc-8615-47ac-8a52-00bdad76e975","Type":"ContainerStarted","Data":"54ceb77b3fab1defe31b746614fbcf3daeb6a980e2e4b9ae380c35d31666d1e0"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.675160 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ngtg2" event={"ID":"94f96eaa-6548-4dbf-bcb6-50c416335322","Type":"ContainerStarted","Data":"fd0be0e542fc098d798245b98588437cf5dc8aa099afdb6c170d8f931f27504d"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.676143 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" event={"ID":"457b330f-11f8-4f32-a2df-86568457b8bd","Type":"ContainerStarted","Data":"3a54cfc645867afe63732589d5fd06b2a4172c988a5aceb9f829686ba4aaa822"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.677525 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c45d9bb8b-tsv5t" event={"ID":"be69fee0-5459-452a-b8e9-ba010ea680b4","Type":"ContainerStarted","Data":"a39f2834b7ac4653b6512d7d5c2461d011225d89e9401685f7e21cabe7d2db63"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.677564 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c45d9bb8b-tsv5t" 
event={"ID":"be69fee0-5459-452a-b8e9-ba010ea680b4","Type":"ContainerStarted","Data":"70ef7a4b189f411bdd4f8b7b5d46d7ec4a2ee9192c3545f19235ec2f27ee0af8"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.678353 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" event={"ID":"ae1b2371-96e9-4b20-b9e5-df312ff49a42","Type":"ContainerStarted","Data":"52559925e169886f9b6ec0dbd3abf81799f61f1ee48c2f459f184f317c18116b"} Dec 09 15:10:39 crc kubenswrapper[4583]: I1209 15:10:39.690975 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-c45d9bb8b-tsv5t" podStartSLOduration=1.6908690320000002 podStartE2EDuration="1.690869032s" podCreationTimestamp="2025-12-09 15:10:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:10:39.688605209 +0000 UTC m=+737.896054297" watchObservedRunningTime="2025-12-09 15:10:39.690869032 +0000 UTC m=+737.898318120" Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.692973 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" event={"ID":"ae1b2371-96e9-4b20-b9e5-df312ff49a42","Type":"ContainerStarted","Data":"54c92d54413262139014adef0ab88d206d4123aaa6a362f449306bdb7f1723ba"} Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.694144 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" event={"ID":"e20829fc-8615-47ac-8a52-00bdad76e975","Type":"ContainerStarted","Data":"78354d22adf11ee36748787321483c439309c023401a9ae65ec646a91d542b2d"} Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.694253 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.695434 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ngtg2" event={"ID":"94f96eaa-6548-4dbf-bcb6-50c416335322","Type":"ContainerStarted","Data":"ec4bdcbbc9ed4b431f4e660d8ec7fae9593573c3bf2db5a3169a31944df072f3"} Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.695524 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.696417 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" event={"ID":"457b330f-11f8-4f32-a2df-86568457b8bd","Type":"ContainerStarted","Data":"47dbede39fb5c7ac3bcb0865a9691955dd4deab895a9edf7990b27d73816c95b"} Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.708292 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" podStartSLOduration=2.718221713 podStartE2EDuration="3.708278004s" podCreationTimestamp="2025-12-09 15:10:38 +0000 UTC" firstStartedPulling="2025-12-09 15:10:39.665806183 +0000 UTC m=+737.873255271" lastFinishedPulling="2025-12-09 15:10:40.655862474 +0000 UTC m=+738.863311562" observedRunningTime="2025-12-09 15:10:41.704471355 +0000 UTC m=+739.911920442" watchObservedRunningTime="2025-12-09 15:10:41.708278004 +0000 UTC m=+739.915727091" Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.717595 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ngtg2" 
podStartSLOduration=1.838997505 podStartE2EDuration="3.717583859s" podCreationTimestamp="2025-12-09 15:10:38 +0000 UTC" firstStartedPulling="2025-12-09 15:10:38.778067871 +0000 UTC m=+736.985516959" lastFinishedPulling="2025-12-09 15:10:40.656654235 +0000 UTC m=+738.864103313" observedRunningTime="2025-12-09 15:10:41.714499881 +0000 UTC m=+739.921948969" watchObservedRunningTime="2025-12-09 15:10:41.717583859 +0000 UTC m=+739.925032947" Dec 09 15:10:41 crc kubenswrapper[4583]: I1209 15:10:41.726786 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tjcbp" podStartSLOduration=1.595068827 podStartE2EDuration="3.726771621s" podCreationTimestamp="2025-12-09 15:10:38 +0000 UTC" firstStartedPulling="2025-12-09 15:10:39.202463343 +0000 UTC m=+737.409912432" lastFinishedPulling="2025-12-09 15:10:41.334166139 +0000 UTC m=+739.541615226" observedRunningTime="2025-12-09 15:10:41.725436066 +0000 UTC m=+739.932885154" watchObservedRunningTime="2025-12-09 15:10:41.726771621 +0000 UTC m=+739.934220710" Dec 09 15:10:43 crc kubenswrapper[4583]: I1209 15:10:43.707406 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" event={"ID":"ae1b2371-96e9-4b20-b9e5-df312ff49a42","Type":"ContainerStarted","Data":"600a7eada036dacab7ce8ae0244f38712134a29f1b3e4cc90971bcd5f09fbda8"} Dec 09 15:10:43 crc kubenswrapper[4583]: I1209 15:10:43.719256 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-stxfm" podStartSLOduration=1.7128690739999999 podStartE2EDuration="5.719243411s" podCreationTimestamp="2025-12-09 15:10:38 +0000 UTC" firstStartedPulling="2025-12-09 15:10:39.096465794 +0000 UTC m=+737.303914881" lastFinishedPulling="2025-12-09 15:10:43.10284013 +0000 UTC m=+741.310289218" observedRunningTime="2025-12-09 15:10:43.717866667 +0000 UTC m=+741.925315755" watchObservedRunningTime="2025-12-09 15:10:43.719243411 +0000 UTC m=+741.926692499" Dec 09 15:10:48 crc kubenswrapper[4583]: I1209 15:10:48.763961 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ngtg2" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.075024 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.075062 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.079020 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.578381 4583 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.737807 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-c45d9bb8b-tsv5t" Dec 09 15:10:49 crc kubenswrapper[4583]: I1209 15:10:49.768080 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.326123 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-m4rct" Dec 09 15:10:59 crc 
kubenswrapper[4583]: I1209 15:10:59.392129 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.392173 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.392220 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.392618 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.392669 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a" gracePeriod=600 Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.776523 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a" exitCode=0 Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.776585 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a"} Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.776694 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5"} Dec 09 15:10:59 crc kubenswrapper[4583]: I1209 15:10:59.776710 4583 scope.go:117] "RemoveContainer" containerID="e5d33c84ff6a93e32880d70a3785cf1929995f7d39b05755a27bd726e3b26c85" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.713086 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48"] Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.714378 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.715870 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.723772 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48"] Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.755304 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.755374 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.755401 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77l4d\" (UniqueName: \"kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.856563 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77l4d\" (UniqueName: \"kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.856638 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.856689 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.857088 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.857137 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:09 crc kubenswrapper[4583]: I1209 15:11:09.870866 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77l4d\" (UniqueName: \"kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:10 crc kubenswrapper[4583]: I1209 15:11:10.027682 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:10 crc kubenswrapper[4583]: I1209 15:11:10.381219 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48"] Dec 09 15:11:10 crc kubenswrapper[4583]: I1209 15:11:10.836651 4583 generic.go:334] "Generic (PLEG): container finished" podID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerID="6bc1c2c3c5e48ff8ae7a975b6a38444a0c569295ab0ff736d988e31e66b80d41" exitCode=0 Dec 09 15:11:10 crc kubenswrapper[4583]: I1209 15:11:10.836722 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" event={"ID":"3ff4a7f6-d61d-4fae-9806-b831561f37a5","Type":"ContainerDied","Data":"6bc1c2c3c5e48ff8ae7a975b6a38444a0c569295ab0ff736d988e31e66b80d41"} Dec 09 15:11:10 crc kubenswrapper[4583]: I1209 15:11:10.836871 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" event={"ID":"3ff4a7f6-d61d-4fae-9806-b831561f37a5","Type":"ContainerStarted","Data":"93272e91763653152d0fad5a02007edcb72456b38c98a93c59bf4fa0480caba6"} Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.662357 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.663345 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.669159 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.675471 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.675512 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmgfb\" (UniqueName: \"kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.675539 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.776233 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.776292 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmgfb\" (UniqueName: \"kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.776325 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.776718 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.776768 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.793524 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fmgfb\" (UniqueName: \"kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb\") pod \"redhat-operators-bzqjv\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:11 crc kubenswrapper[4583]: I1209 15:11:11.973984 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.322963 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.845649 4583 generic.go:334] "Generic (PLEG): container finished" podID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerID="00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071" exitCode=0 Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.845728 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerDied","Data":"00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071"} Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.845778 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerStarted","Data":"610735196b41a77069c3155ca39c80b2e91392749c932913b4cf583e91c75e6f"} Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.847858 4583 generic.go:334] "Generic (PLEG): container finished" podID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerID="3e7f5007182a1bbade35a498f03746f5a4e96850821d7e86176ee34869f6534c" exitCode=0 Dec 09 15:11:12 crc kubenswrapper[4583]: I1209 15:11:12.847901 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" event={"ID":"3ff4a7f6-d61d-4fae-9806-b831561f37a5","Type":"ContainerDied","Data":"3e7f5007182a1bbade35a498f03746f5a4e96850821d7e86176ee34869f6534c"} Dec 09 15:11:13 crc kubenswrapper[4583]: I1209 15:11:13.853543 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerStarted","Data":"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e"} Dec 09 15:11:13 crc kubenswrapper[4583]: I1209 15:11:13.855253 4583 generic.go:334] "Generic (PLEG): container finished" podID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerID="7c15438b125b6f26b76461c58591c7f1c6952af43764efe7f0fb8eeeb5a428aa" exitCode=0 Dec 09 15:11:13 crc kubenswrapper[4583]: I1209 15:11:13.855283 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" event={"ID":"3ff4a7f6-d61d-4fae-9806-b831561f37a5","Type":"ContainerDied","Data":"7c15438b125b6f26b76461c58591c7f1c6952af43764efe7f0fb8eeeb5a428aa"} Dec 09 15:11:14 crc kubenswrapper[4583]: I1209 15:11:14.800094 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-n85fv" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" containerID="cri-o://ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393" gracePeriod=15 Dec 09 15:11:14 crc kubenswrapper[4583]: I1209 15:11:14.860689 4583 generic.go:334] "Generic (PLEG): container finished" 
podID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerID="b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e" exitCode=0 Dec 09 15:11:14 crc kubenswrapper[4583]: I1209 15:11:14.860912 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerDied","Data":"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e"} Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.069412 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.113662 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util\") pod \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.113734 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle\") pod \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.113786 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77l4d\" (UniqueName: \"kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d\") pod \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\" (UID: \"3ff4a7f6-d61d-4fae-9806-b831561f37a5\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.114480 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle" (OuterVolumeSpecName: "bundle") pod "3ff4a7f6-d61d-4fae-9806-b831561f37a5" (UID: "3ff4a7f6-d61d-4fae-9806-b831561f37a5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.118606 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d" (OuterVolumeSpecName: "kube-api-access-77l4d") pod "3ff4a7f6-d61d-4fae-9806-b831561f37a5" (UID: "3ff4a7f6-d61d-4fae-9806-b831561f37a5"). InnerVolumeSpecName "kube-api-access-77l4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.124500 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util" (OuterVolumeSpecName: "util") pod "3ff4a7f6-d61d-4fae-9806-b831561f37a5" (UID: "3ff4a7f6-d61d-4fae-9806-b831561f37a5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.137665 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-n85fv_63f028f1-f3fc-46f9-9eec-471fa53871ed/console/0.log" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.137722 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214672 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214720 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4gp7\" (UniqueName: \"kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214769 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214797 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214827 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214845 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.214929 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle\") pod \"63f028f1-f3fc-46f9-9eec-471fa53871ed\" (UID: \"63f028f1-f3fc-46f9-9eec-471fa53871ed\") " Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215510 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215517 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca" (OuterVolumeSpecName: "service-ca") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215533 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215589 4583 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-util\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215601 4583 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-service-ca\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215590 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config" (OuterVolumeSpecName: "console-config") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215610 4583 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3ff4a7f6-d61d-4fae-9806-b831561f37a5-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215660 4583 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215673 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77l4d\" (UniqueName: \"kubernetes.io/projected/3ff4a7f6-d61d-4fae-9806-b831561f37a5-kube-api-access-77l4d\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.215682 4583 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.217728 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.217811 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7" (OuterVolumeSpecName: "kube-api-access-q4gp7") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "kube-api-access-q4gp7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.217841 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "63f028f1-f3fc-46f9-9eec-471fa53871ed" (UID: "63f028f1-f3fc-46f9-9eec-471fa53871ed"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.316027 4583 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.316048 4583 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.316058 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4gp7\" (UniqueName: \"kubernetes.io/projected/63f028f1-f3fc-46f9-9eec-471fa53871ed-kube-api-access-q4gp7\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.316068 4583 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/63f028f1-f3fc-46f9-9eec-471fa53871ed-console-config\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.866884 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerStarted","Data":"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3"} Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.868375 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" event={"ID":"3ff4a7f6-d61d-4fae-9806-b831561f37a5","Type":"ContainerDied","Data":"93272e91763653152d0fad5a02007edcb72456b38c98a93c59bf4fa0480caba6"} Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.868410 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93272e91763653152d0fad5a02007edcb72456b38c98a93c59bf4fa0480caba6" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.868390 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869700 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-n85fv_63f028f1-f3fc-46f9-9eec-471fa53871ed/console/0.log" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869739 4583 generic.go:334] "Generic (PLEG): container finished" podID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerID="ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393" exitCode=2 Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869791 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-n85fv" event={"ID":"63f028f1-f3fc-46f9-9eec-471fa53871ed","Type":"ContainerDied","Data":"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393"} Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869803 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-n85fv" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869811 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-n85fv" event={"ID":"63f028f1-f3fc-46f9-9eec-471fa53871ed","Type":"ContainerDied","Data":"635166240af60ae7df89fa30974c7e83d9d6f01b0bd88c84a0c6f0d3a1fcc63d"} Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.869821 4583 scope.go:117] "RemoveContainer" containerID="ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.884034 4583 scope.go:117] "RemoveContainer" containerID="ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.884235 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bzqjv" podStartSLOduration=2.418659482 podStartE2EDuration="4.884225335s" podCreationTimestamp="2025-12-09 15:11:11 +0000 UTC" firstStartedPulling="2025-12-09 15:11:12.846767019 +0000 UTC m=+771.054216106" lastFinishedPulling="2025-12-09 15:11:15.312332871 +0000 UTC m=+773.519781959" observedRunningTime="2025-12-09 15:11:15.8816579 +0000 UTC m=+774.089106989" watchObservedRunningTime="2025-12-09 15:11:15.884225335 +0000 UTC m=+774.091674424" Dec 09 15:11:15 crc kubenswrapper[4583]: E1209 15:11:15.884337 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393\": container with ID starting with ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393 not found: ID does not exist" containerID="ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.884369 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393"} err="failed to get container status \"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393\": rpc error: code = NotFound desc = could not find container \"ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393\": container with ID starting with ed8d2ec3cf7ce78b994653e5b071445ed0ff1a557a8682e50c06b969ccce6393 not found: ID does not exist" Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.892227 4583 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:11:15 crc kubenswrapper[4583]: I1209 15:11:15.895666 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-n85fv"] Dec 09 15:11:16 crc kubenswrapper[4583]: I1209 15:11:16.084635 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" path="/var/lib/kubelet/pods/63f028f1-f3fc-46f9-9eec-471fa53871ed/volumes" Dec 09 15:11:21 crc kubenswrapper[4583]: I1209 15:11:21.975095 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:21 crc kubenswrapper[4583]: I1209 15:11:21.975437 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:22 crc kubenswrapper[4583]: I1209 15:11:22.003524 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:22 crc kubenswrapper[4583]: I1209 15:11:22.926426 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:23 crc kubenswrapper[4583]: I1209 15:11:23.856880 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:24 crc kubenswrapper[4583]: I1209 15:11:24.908680 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bzqjv" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="registry-server" containerID="cri-o://b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3" gracePeriod=2 Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.224930 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.334603 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmgfb\" (UniqueName: \"kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb\") pod \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.334645 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content\") pod \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.334697 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities\") pod \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\" (UID: \"21ac83ea-ca63-4af3-84fa-75d38d58a3b6\") " Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.335398 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities" (OuterVolumeSpecName: "utilities") pod "21ac83ea-ca63-4af3-84fa-75d38d58a3b6" (UID: "21ac83ea-ca63-4af3-84fa-75d38d58a3b6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.339685 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb" (OuterVolumeSpecName: "kube-api-access-fmgfb") pod "21ac83ea-ca63-4af3-84fa-75d38d58a3b6" (UID: "21ac83ea-ca63-4af3-84fa-75d38d58a3b6"). InnerVolumeSpecName "kube-api-access-fmgfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.411558 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21ac83ea-ca63-4af3-84fa-75d38d58a3b6" (UID: "21ac83ea-ca63-4af3-84fa-75d38d58a3b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.435899 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmgfb\" (UniqueName: \"kubernetes.io/projected/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-kube-api-access-fmgfb\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.436146 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.436229 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ac83ea-ca63-4af3-84fa-75d38d58a3b6-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.955560 4583 generic.go:334] "Generic (PLEG): container finished" podID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerID="b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3" exitCode=0 Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.955603 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerDied","Data":"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3"} Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.955627 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzqjv" event={"ID":"21ac83ea-ca63-4af3-84fa-75d38d58a3b6","Type":"ContainerDied","Data":"610735196b41a77069c3155ca39c80b2e91392749c932913b4cf583e91c75e6f"} Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.955634 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bzqjv" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.955642 4583 scope.go:117] "RemoveContainer" containerID="b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.979546 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.980271 4583 scope.go:117] "RemoveContainer" containerID="b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e" Dec 09 15:11:25 crc kubenswrapper[4583]: I1209 15:11:25.984524 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bzqjv"] Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.000926 4583 scope.go:117] "RemoveContainer" containerID="00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.011533 4583 scope.go:117] "RemoveContainer" containerID="b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.012027 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3\": container with ID starting with b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3 not found: ID does not exist" containerID="b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.012057 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3"} err="failed to get container status \"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3\": rpc error: code = NotFound desc = could not find container \"b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3\": container with ID starting with b69a19afd2bf6dcef07b74ec1fef6bf9a9bdf10b0dd501beb8e50168e7c759a3 not found: ID does not exist" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.012079 4583 scope.go:117] "RemoveContainer" containerID="b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.012527 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e\": container with ID starting with b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e not found: ID does not exist" containerID="b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.012574 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e"} err="failed to get container status \"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e\": rpc error: code = NotFound desc = could not find container \"b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e\": container with ID starting with b5e48b0c3389bc5b8f3619c6b5932185436ee8241ae2feb184551ac52d3ef32e not found: ID does not exist" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.012596 4583 scope.go:117] "RemoveContainer" 
containerID="00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.012855 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071\": container with ID starting with 00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071 not found: ID does not exist" containerID="00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.012878 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071"} err="failed to get container status \"00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071\": rpc error: code = NotFound desc = could not find container \"00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071\": container with ID starting with 00c712c0cac24205781cb51a0ad8f16210547eb8ea0bddc946c50b8c28941071 not found: ID does not exist" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.085389 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" path="/var/lib/kubelet/pods/21ac83ea-ca63-4af3-84fa-75d38d58a3b6/volumes" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.965439 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr"] Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966231 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="registry-server" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966309 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="registry-server" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966389 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966438 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966486 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="util" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966537 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="util" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966586 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="extract" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966635 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="extract" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966684 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="pull" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966731 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="pull" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966799 4583 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="extract-utilities" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966843 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="extract-utilities" Dec 09 15:11:26 crc kubenswrapper[4583]: E1209 15:11:26.966905 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="extract-content" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.966952 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="extract-content" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.967109 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ac83ea-ca63-4af3-84fa-75d38d58a3b6" containerName="registry-server" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.967177 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff4a7f6-d61d-4fae-9806-b831561f37a5" containerName="extract" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.967230 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="63f028f1-f3fc-46f9-9eec-471fa53871ed" containerName="console" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.967651 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.971978 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.972195 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.972642 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.973286 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.977696 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-glkwt" Dec 09 15:11:26 crc kubenswrapper[4583]: I1209 15:11:26.987125 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr"] Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.158870 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-apiservice-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.159079 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn2r6\" (UniqueName: \"kubernetes.io/projected/d23da855-0d1f-4866-8281-1fa9a175a1bd-kube-api-access-wn2r6\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " 
pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.159128 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-webhook-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.191392 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp"] Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.192110 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.193211 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-jlfvn" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.194308 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.195494 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.199342 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp"] Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260267 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-apiservice-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260323 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn2r6\" (UniqueName: \"kubernetes.io/projected/d23da855-0d1f-4866-8281-1fa9a175a1bd-kube-api-access-wn2r6\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260345 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-webhook-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260439 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp2sb\" (UniqueName: \"kubernetes.io/projected/6cef6965-8116-435e-86e1-76c965e8c319-kube-api-access-jp2sb\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260494 4583 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-apiservice-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.260561 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-webhook-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.264827 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-webhook-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.264841 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d23da855-0d1f-4866-8281-1fa9a175a1bd-apiservice-cert\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.273806 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn2r6\" (UniqueName: \"kubernetes.io/projected/d23da855-0d1f-4866-8281-1fa9a175a1bd-kube-api-access-wn2r6\") pod \"metallb-operator-controller-manager-f7f465b94-n2qjr\" (UID: \"d23da855-0d1f-4866-8281-1fa9a175a1bd\") " pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.280631 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.362451 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp2sb\" (UniqueName: \"kubernetes.io/projected/6cef6965-8116-435e-86e1-76c965e8c319-kube-api-access-jp2sb\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.362525 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-webhook-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.362598 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-apiservice-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.366889 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-webhook-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.366978 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6cef6965-8116-435e-86e1-76c965e8c319-apiservice-cert\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.375771 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp2sb\" (UniqueName: \"kubernetes.io/projected/6cef6965-8116-435e-86e1-76c965e8c319-kube-api-access-jp2sb\") pod \"metallb-operator-webhook-server-696488d7c9-kfwfp\" (UID: \"6cef6965-8116-435e-86e1-76c965e8c319\") " pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.507093 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.634717 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr"] Dec 09 15:11:27 crc kubenswrapper[4583]: W1209 15:11:27.642854 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd23da855_0d1f_4866_8281_1fa9a175a1bd.slice/crio-f03edbe1471a4123bc291726941b24e052491f0dd2a2e1ed5b08e2d5b5df8bfe WatchSource:0}: Error finding container f03edbe1471a4123bc291726941b24e052491f0dd2a2e1ed5b08e2d5b5df8bfe: Status 404 returned error can't find the container with id f03edbe1471a4123bc291726941b24e052491f0dd2a2e1ed5b08e2d5b5df8bfe Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.845980 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp"] Dec 09 15:11:27 crc kubenswrapper[4583]: W1209 15:11:27.849014 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cef6965_8116_435e_86e1_76c965e8c319.slice/crio-cc299be9c7bad9fd32436935620c261da7e9f805224fbbdcfcf4c75c116d9837 WatchSource:0}: Error finding container cc299be9c7bad9fd32436935620c261da7e9f805224fbbdcfcf4c75c116d9837: Status 404 returned error can't find the container with id cc299be9c7bad9fd32436935620c261da7e9f805224fbbdcfcf4c75c116d9837 Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.965889 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" event={"ID":"d23da855-0d1f-4866-8281-1fa9a175a1bd","Type":"ContainerStarted","Data":"f03edbe1471a4123bc291726941b24e052491f0dd2a2e1ed5b08e2d5b5df8bfe"} Dec 09 15:11:27 crc kubenswrapper[4583]: I1209 15:11:27.966910 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" event={"ID":"6cef6965-8116-435e-86e1-76c965e8c319","Type":"ContainerStarted","Data":"cc299be9c7bad9fd32436935620c261da7e9f805224fbbdcfcf4c75c116d9837"} Dec 09 15:11:30 crc kubenswrapper[4583]: I1209 15:11:30.985302 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" event={"ID":"d23da855-0d1f-4866-8281-1fa9a175a1bd","Type":"ContainerStarted","Data":"4c64471c0c819957bb4a952dd3c9f6aa473aa7afe326b85395505c95f47e3569"} Dec 09 15:11:30 crc kubenswrapper[4583]: I1209 15:11:30.985526 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:11:31 crc kubenswrapper[4583]: I1209 15:11:31.003396 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" podStartSLOduration=2.588418602 podStartE2EDuration="5.00338325s" podCreationTimestamp="2025-12-09 15:11:26 +0000 UTC" firstStartedPulling="2025-12-09 15:11:27.645834713 +0000 UTC m=+785.853283801" lastFinishedPulling="2025-12-09 15:11:30.060799361 +0000 UTC m=+788.268248449" observedRunningTime="2025-12-09 15:11:31.001998391 +0000 UTC m=+789.209447480" watchObservedRunningTime="2025-12-09 15:11:31.00338325 +0000 UTC m=+789.210832338" Dec 09 15:11:31 crc kubenswrapper[4583]: I1209 15:11:31.991479 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" event={"ID":"6cef6965-8116-435e-86e1-76c965e8c319","Type":"ContainerStarted","Data":"65feb2d3d3a80973b33441aec5a831efc1155c2832131ba1fe1dc87f7d2446f8"} Dec 09 15:11:32 crc kubenswrapper[4583]: I1209 15:11:32.004986 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" podStartSLOduration=1.617668256 podStartE2EDuration="5.004971879s" podCreationTimestamp="2025-12-09 15:11:27 +0000 UTC" firstStartedPulling="2025-12-09 15:11:27.851577688 +0000 UTC m=+786.059026776" lastFinishedPulling="2025-12-09 15:11:31.238881321 +0000 UTC m=+789.446330399" observedRunningTime="2025-12-09 15:11:32.003845629 +0000 UTC m=+790.211294716" watchObservedRunningTime="2025-12-09 15:11:32.004971879 +0000 UTC m=+790.212420968" Dec 09 15:11:32 crc kubenswrapper[4583]: I1209 15:11:32.995605 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:11:47 crc kubenswrapper[4583]: I1209 15:11:47.515254 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-696488d7c9-kfwfp" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.283878 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f7f465b94-n2qjr" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.810648 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2zn75"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.822783 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.825258 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.826008 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-hxznh" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.826251 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.830863 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.831576 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.833585 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.839887 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841157 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/55c9372b-521a-4b5f-adea-817d98f90de4-frr-startup\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841205 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-conf\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841261 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-reloader\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841288 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-sockets\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841318 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7gs9\" (UniqueName: \"kubernetes.io/projected/d7363ae4-f775-4300-b836-aeb7c384dcee-kube-api-access-w7gs9\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841373 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841395 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f2l8\" (UniqueName: \"kubernetes.io/projected/55c9372b-521a-4b5f-adea-817d98f90de4-kube-api-access-8f2l8\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841422 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " 
pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.841440 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-metrics\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.885377 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-58smd"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.886416 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-58smd" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.889060 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.889129 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.889209 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.889337 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-8tdj5" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.895540 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-97nqc"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.896560 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.898045 4583 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.916484 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-97nqc"] Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942373 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-sockets\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942420 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7gs9\" (UniqueName: \"kubernetes.io/projected/d7363ae4-f775-4300-b836-aeb7c384dcee-kube-api-access-w7gs9\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942797 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-sockets\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942933 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: 
\"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942957 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f2l8\" (UniqueName: \"kubernetes.io/projected/55c9372b-521a-4b5f-adea-817d98f90de4-kube-api-access-8f2l8\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942980 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.942995 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-metrics\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: E1209 15:12:07.943003 4583 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 09 15:12:07 crc kubenswrapper[4583]: E1209 15:12:07.943048 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert podName:d7363ae4-f775-4300-b836-aeb7c384dcee nodeName:}" failed. No retries permitted until 2025-12-09 15:12:08.443033655 +0000 UTC m=+826.650482744 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert") pod "frr-k8s-webhook-server-7fcb986d4-68j5w" (UID: "d7363ae4-f775-4300-b836-aeb7c384dcee") : secret "frr-k8s-webhook-server-cert" not found Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943010 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/55c9372b-521a-4b5f-adea-817d98f90de4-frr-startup\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943224 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-conf\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943456 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-reloader\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: E1209 15:12:07.943398 4583 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 09 15:12:07 crc kubenswrapper[4583]: E1209 15:12:07.943684 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs podName:55c9372b-521a-4b5f-adea-817d98f90de4 nodeName:}" failed. 
No retries permitted until 2025-12-09 15:12:08.443664804 +0000 UTC m=+826.651113892 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs") pod "frr-k8s-2zn75" (UID: "55c9372b-521a-4b5f-adea-817d98f90de4") : secret "frr-k8s-certs-secret" not found Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943690 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-frr-conf\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943819 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-reloader\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943698 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/55c9372b-521a-4b5f-adea-817d98f90de4-frr-startup\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.943603 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/55c9372b-521a-4b5f-adea-817d98f90de4-metrics\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.959001 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7gs9\" (UniqueName: \"kubernetes.io/projected/d7363ae4-f775-4300-b836-aeb7c384dcee-kube-api-access-w7gs9\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:07 crc kubenswrapper[4583]: I1209 15:12:07.959118 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f2l8\" (UniqueName: \"kubernetes.io/projected/55c9372b-521a-4b5f-adea-817d98f90de4-kube-api-access-8f2l8\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044200 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metallb-excludel2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044400 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8w2\" (UniqueName: \"kubernetes.io/projected/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-kube-api-access-zf8w2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044493 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044575 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-metrics-certs\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044660 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metrics-certs\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044738 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww7g5\" (UniqueName: \"kubernetes.io/projected/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-kube-api-access-ww7g5\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.044834 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-cert\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145472 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metallb-excludel2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145510 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8w2\" (UniqueName: \"kubernetes.io/projected/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-kube-api-access-zf8w2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145533 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145557 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-metrics-certs\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145579 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metrics-certs\") pod \"speaker-58smd\" (UID: 
\"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145610 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww7g5\" (UniqueName: \"kubernetes.io/projected/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-kube-api-access-ww7g5\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.145627 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-cert\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: E1209 15:12:08.146477 4583 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 09 15:12:08 crc kubenswrapper[4583]: E1209 15:12:08.146552 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist podName:e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f nodeName:}" failed. No retries permitted until 2025-12-09 15:12:08.646533656 +0000 UTC m=+826.853982744 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist") pod "speaker-58smd" (UID: "e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f") : secret "metallb-memberlist" not found Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.147119 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metallb-excludel2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.148598 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-metrics-certs\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.148712 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-metrics-certs\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.149023 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-cert\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.160599 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8w2\" (UniqueName: \"kubernetes.io/projected/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-kube-api-access-zf8w2\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.160771 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww7g5\" (UniqueName: \"kubernetes.io/projected/d6e8ec80-20a1-4023-aa5d-3a29154e66c3-kube-api-access-ww7g5\") pod \"controller-f8648f98b-97nqc\" (UID: \"d6e8ec80-20a1-4023-aa5d-3a29154e66c3\") " pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.213484 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.449363 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.449592 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.452540 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/55c9372b-521a-4b5f-adea-817d98f90de4-metrics-certs\") pod \"frr-k8s-2zn75\" (UID: \"55c9372b-521a-4b5f-adea-817d98f90de4\") " pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.452570 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7363ae4-f775-4300-b836-aeb7c384dcee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-68j5w\" (UID: \"d7363ae4-f775-4300-b836-aeb7c384dcee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.577134 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-97nqc"] Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.651287 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:08 crc kubenswrapper[4583]: E1209 15:12:08.651504 4583 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 09 15:12:08 crc kubenswrapper[4583]: E1209 15:12:08.651559 4583 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist podName:e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f nodeName:}" failed. No retries permitted until 2025-12-09 15:12:09.651545619 +0000 UTC m=+827.858994697 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist") pod "speaker-58smd" (UID: "e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f") : secret "metallb-memberlist" not found Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.736041 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:08 crc kubenswrapper[4583]: I1209 15:12:08.742049 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.090804 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w"] Dec 09 15:12:09 crc kubenswrapper[4583]: W1209 15:12:09.092913 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7363ae4_f775_4300_b836_aeb7c384dcee.slice/crio-0f14948fbaaa246a3a496c9c0400cab2b0918896282d0272f0dc373a3bb38e15 WatchSource:0}: Error finding container 0f14948fbaaa246a3a496c9c0400cab2b0918896282d0272f0dc373a3bb38e15: Status 404 returned error can't find the container with id 0f14948fbaaa246a3a496c9c0400cab2b0918896282d0272f0dc373a3bb38e15 Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.163260 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"8fbe43186a02d877ea925e6ce6585a238923f0df2b0fa20d4508f6c8b6f05b6d"} Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.164295 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" event={"ID":"d7363ae4-f775-4300-b836-aeb7c384dcee","Type":"ContainerStarted","Data":"0f14948fbaaa246a3a496c9c0400cab2b0918896282d0272f0dc373a3bb38e15"} Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.165543 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-97nqc" event={"ID":"d6e8ec80-20a1-4023-aa5d-3a29154e66c3","Type":"ContainerStarted","Data":"00d1a943ff9a175cbd0a23bff6e868a53d541d286625eb9f5fc51d75911c8bc3"} Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.165575 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-97nqc" event={"ID":"d6e8ec80-20a1-4023-aa5d-3a29154e66c3","Type":"ContainerStarted","Data":"b15bc56867d8a97ff664e5030e5c87309769ae240dfa8aedaeb79f0846010558"} Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.165584 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-97nqc" event={"ID":"d6e8ec80-20a1-4023-aa5d-3a29154e66c3","Type":"ContainerStarted","Data":"19b1e92611ed931b29dd99eb214f4001c5215efb1a65fe53a4b57fea54181fec"} Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.165672 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.176439 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-97nqc" podStartSLOduration=2.176427652 podStartE2EDuration="2.176427652s" podCreationTimestamp="2025-12-09 15:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:12:09.174113073 +0000 UTC m=+827.381562161" watchObservedRunningTime="2025-12-09 15:12:09.176427652 +0000 UTC m=+827.383876740" Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.665736 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.672513 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f-memberlist\") pod \"speaker-58smd\" (UID: \"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f\") " pod="metallb-system/speaker-58smd" Dec 09 15:12:09 crc kubenswrapper[4583]: I1209 15:12:09.699540 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-58smd" Dec 09 15:12:09 crc kubenswrapper[4583]: W1209 15:12:09.714212 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode00bf7b0_93a9_4e3c_a8e8_0d57fa90e99f.slice/crio-e40acd84b67880eeced48ac484a13cc257d73ce1dc8445ca79c7b9627ba3b8ae WatchSource:0}: Error finding container e40acd84b67880eeced48ac484a13cc257d73ce1dc8445ca79c7b9627ba3b8ae: Status 404 returned error can't find the container with id e40acd84b67880eeced48ac484a13cc257d73ce1dc8445ca79c7b9627ba3b8ae Dec 09 15:12:10 crc kubenswrapper[4583]: I1209 15:12:10.174056 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-58smd" event={"ID":"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f","Type":"ContainerStarted","Data":"b140a7403a7ccc3533f3111c6bcef53203b5dbdd8836b63e4bc2af334116dbaa"} Dec 09 15:12:10 crc kubenswrapper[4583]: I1209 15:12:10.174398 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-58smd" event={"ID":"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f","Type":"ContainerStarted","Data":"92c99caa1cea91fa478a6396dfc13b08e9fbc3e5963017c900ae71b92d9e6bf5"} Dec 09 15:12:10 crc kubenswrapper[4583]: I1209 15:12:10.174410 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-58smd" event={"ID":"e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f","Type":"ContainerStarted","Data":"e40acd84b67880eeced48ac484a13cc257d73ce1dc8445ca79c7b9627ba3b8ae"} Dec 09 15:12:10 crc kubenswrapper[4583]: I1209 15:12:10.174603 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-58smd" Dec 09 15:12:10 crc kubenswrapper[4583]: I1209 15:12:10.188724 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-58smd" podStartSLOduration=3.188711164 podStartE2EDuration="3.188711164s" podCreationTimestamp="2025-12-09 15:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-09 15:12:10.185853841 +0000 UTC m=+828.393302930" watchObservedRunningTime="2025-12-09 15:12:10.188711164 +0000 UTC m=+828.396160251" Dec 09 15:12:15 crc kubenswrapper[4583]: I1209 15:12:15.208125 4583 generic.go:334] "Generic (PLEG): container finished" podID="55c9372b-521a-4b5f-adea-817d98f90de4" containerID="6052e5f1ddf75e3b9ae6cfeaa899ca60ff9997b6d830b2e6cc79626bd54b5ca9" exitCode=0 Dec 09 15:12:15 crc kubenswrapper[4583]: I1209 15:12:15.208169 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerDied","Data":"6052e5f1ddf75e3b9ae6cfeaa899ca60ff9997b6d830b2e6cc79626bd54b5ca9"} Dec 09 15:12:15 crc kubenswrapper[4583]: I1209 15:12:15.209586 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" event={"ID":"d7363ae4-f775-4300-b836-aeb7c384dcee","Type":"ContainerStarted","Data":"8bc4e609d49af7fda8aaa06feac36bff692fc02b9b8e78332386d06d5ed11ff3"} Dec 09 15:12:15 crc kubenswrapper[4583]: I1209 15:12:15.209733 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:15 crc kubenswrapper[4583]: I1209 15:12:15.233685 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" podStartSLOduration=2.987947514 podStartE2EDuration="8.23366945s" podCreationTimestamp="2025-12-09 15:12:07 +0000 UTC" firstStartedPulling="2025-12-09 15:12:09.094524567 +0000 UTC m=+827.301973656" lastFinishedPulling="2025-12-09 15:12:14.340246503 +0000 UTC m=+832.547695592" observedRunningTime="2025-12-09 15:12:15.233209685 +0000 UTC m=+833.440658772" watchObservedRunningTime="2025-12-09 15:12:15.23366945 +0000 UTC m=+833.441118539" Dec 09 15:12:16 crc kubenswrapper[4583]: I1209 15:12:16.216351 4583 generic.go:334] "Generic (PLEG): container finished" podID="55c9372b-521a-4b5f-adea-817d98f90de4" containerID="9aac5add346fd683d91e509b0cc7abf2abcd42d84af80244c4964fae19ad29f6" exitCode=0 Dec 09 15:12:16 crc kubenswrapper[4583]: I1209 15:12:16.216379 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerDied","Data":"9aac5add346fd683d91e509b0cc7abf2abcd42d84af80244c4964fae19ad29f6"} Dec 09 15:12:17 crc kubenswrapper[4583]: I1209 15:12:17.222817 4583 generic.go:334] "Generic (PLEG): container finished" podID="55c9372b-521a-4b5f-adea-817d98f90de4" containerID="8a6716f1ecb1250acdd5421c916c46f4990abb9c8839fcbd1bb0fa84a0d8c403" exitCode=0 Dec 09 15:12:17 crc kubenswrapper[4583]: I1209 15:12:17.222902 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerDied","Data":"8a6716f1ecb1250acdd5421c916c46f4990abb9c8839fcbd1bb0fa84a0d8c403"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.217437 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-97nqc" Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231229 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"0dd19d11dc92cd0e3ba8d12368d47f3867dc577ec85c4b7f10c527b8227aed7c"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231267 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"2fc07063d3eff5ab336aa8ad685d73b7e89454a0675f321e7d4a52083f2f75cb"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231277 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"1bdb07160ffa1d1694fd6263d31c2bcac9a2d3d0ad7849556609e9f93782c352"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231286 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" 
event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"c0d592d090120db4de70c7eece406b07c960ea08106519cc942801eaac73581f"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231295 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"ac94ae389d2c50cc9143d9f8e96386db8ac493cfd17be9c7fd9a0490cc5426ed"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231302 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2zn75" event={"ID":"55c9372b-521a-4b5f-adea-817d98f90de4","Type":"ContainerStarted","Data":"71cf11b886514b06162b3a93b002dd4da4feb0127204cb19629a16a851af58e4"} Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.231407 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.247429 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2zn75" podStartSLOduration=5.736149321 podStartE2EDuration="11.247413809s" podCreationTimestamp="2025-12-09 15:12:07 +0000 UTC" firstStartedPulling="2025-12-09 15:12:08.825200735 +0000 UTC m=+827.032649822" lastFinishedPulling="2025-12-09 15:12:14.336465222 +0000 UTC m=+832.543914310" observedRunningTime="2025-12-09 15:12:18.245331437 +0000 UTC m=+836.452780525" watchObservedRunningTime="2025-12-09 15:12:18.247413809 +0000 UTC m=+836.454862897" Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.737002 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:18 crc kubenswrapper[4583]: I1209 15:12:18.767117 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:19 crc kubenswrapper[4583]: I1209 15:12:19.702370 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-58smd" Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.985077 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.986361 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.987834 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-48r6t" Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.987901 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.988188 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 09 15:12:21 crc kubenswrapper[4583]: I1209 15:12:21.990012 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.034460 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj8xh\" (UniqueName: \"kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh\") pod \"openstack-operator-index-5fr5p\" (UID: \"2ab400fe-20b5-4d42-92bc-e53d9454656e\") " pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.135329 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj8xh\" (UniqueName: \"kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh\") pod \"openstack-operator-index-5fr5p\" (UID: \"2ab400fe-20b5-4d42-92bc-e53d9454656e\") " pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.147236 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.158177 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.172506 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj8xh\" (UniqueName: \"kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh\") pod \"openstack-operator-index-5fr5p\" (UID: \"2ab400fe-20b5-4d42-92bc-e53d9454656e\") " pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.303012 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-48r6t" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.312194 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.664219 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:12:22 crc kubenswrapper[4583]: I1209 15:12:22.670855 4583 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 09 15:12:23 crc kubenswrapper[4583]: I1209 15:12:23.263675 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5fr5p" event={"ID":"2ab400fe-20b5-4d42-92bc-e53d9454656e","Type":"ContainerStarted","Data":"236564a62da58ce3940d642929a03a884779d4e4308cace1b946167c6ac16278"} Dec 09 15:12:24 crc kubenswrapper[4583]: I1209 15:12:24.771789 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.377002 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4vftt"] Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.377666 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4vftt" Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.382451 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4vftt"] Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.478453 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66tvz\" (UniqueName: \"kubernetes.io/projected/9726924c-7078-4a99-adbe-54e2070ef9da-kube-api-access-66tvz\") pod \"openstack-operator-index-4vftt\" (UID: \"9726924c-7078-4a99-adbe-54e2070ef9da\") " pod="openstack-operators/openstack-operator-index-4vftt" Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.578923 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66tvz\" (UniqueName: \"kubernetes.io/projected/9726924c-7078-4a99-adbe-54e2070ef9da-kube-api-access-66tvz\") pod \"openstack-operator-index-4vftt\" (UID: \"9726924c-7078-4a99-adbe-54e2070ef9da\") " pod="openstack-operators/openstack-operator-index-4vftt" Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.593458 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66tvz\" (UniqueName: \"kubernetes.io/projected/9726924c-7078-4a99-adbe-54e2070ef9da-kube-api-access-66tvz\") pod \"openstack-operator-index-4vftt\" (UID: \"9726924c-7078-4a99-adbe-54e2070ef9da\") " pod="openstack-operators/openstack-operator-index-4vftt" Dec 09 15:12:25 crc kubenswrapper[4583]: I1209 15:12:25.691565 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4vftt" Dec 09 15:12:26 crc kubenswrapper[4583]: I1209 15:12:26.030559 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4vftt"] Dec 09 15:12:26 crc kubenswrapper[4583]: W1209 15:12:26.032888 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9726924c_7078_4a99_adbe_54e2070ef9da.slice/crio-e386dabed3c6a3d9647b930c5ba544a66738190bb6c6782b746bf7b89d83d88b WatchSource:0}: Error finding container e386dabed3c6a3d9647b930c5ba544a66738190bb6c6782b746bf7b89d83d88b: Status 404 returned error can't find the container with id e386dabed3c6a3d9647b930c5ba544a66738190bb6c6782b746bf7b89d83d88b Dec 09 15:12:26 crc kubenswrapper[4583]: I1209 15:12:26.278277 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4vftt" event={"ID":"9726924c-7078-4a99-adbe-54e2070ef9da","Type":"ContainerStarted","Data":"e386dabed3c6a3d9647b930c5ba544a66738190bb6c6782b746bf7b89d83d88b"} Dec 09 15:12:28 crc kubenswrapper[4583]: I1209 15:12:28.741475 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2zn75" Dec 09 15:12:28 crc kubenswrapper[4583]: I1209 15:12:28.759254 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-68j5w" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.245056 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.246390 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.251767 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.269148 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.269240 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzcxr\" (UniqueName: \"kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.269281 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.370259 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content\") pod 
\"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.370319 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.370389 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzcxr\" (UniqueName: \"kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.370659 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.370766 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.386129 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzcxr\" (UniqueName: \"kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr\") pod \"redhat-marketplace-rhprr\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.557654 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:12:58 crc kubenswrapper[4583]: I1209 15:12:58.904927 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:12:59 crc kubenswrapper[4583]: I1209 15:12:59.391526 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:12:59 crc kubenswrapper[4583]: I1209 15:12:59.391583 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:12:59 crc kubenswrapper[4583]: I1209 15:12:59.417613 4583 generic.go:334] "Generic (PLEG): container finished" podID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerID="299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322" exitCode=0 Dec 09 15:12:59 crc kubenswrapper[4583]: I1209 15:12:59.417649 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerDied","Data":"299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322"} Dec 09 15:12:59 crc kubenswrapper[4583]: I1209 15:12:59.417695 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerStarted","Data":"a413e0c9a3f7957df68d82c331cb018fb131c8e8d33d70ae68454cbb963e9c01"} Dec 09 15:13:00 crc kubenswrapper[4583]: I1209 15:13:00.423809 4583 generic.go:334] "Generic (PLEG): container finished" podID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerID="ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a" exitCode=0 Dec 09 15:13:00 crc kubenswrapper[4583]: I1209 15:13:00.423853 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerDied","Data":"ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a"} Dec 09 15:13:01 crc kubenswrapper[4583]: I1209 15:13:01.429434 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerStarted","Data":"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8"} Dec 09 15:13:01 crc kubenswrapper[4583]: I1209 15:13:01.448831 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rhprr" podStartSLOduration=1.983081754 podStartE2EDuration="3.448816149s" podCreationTimestamp="2025-12-09 15:12:58 +0000 UTC" firstStartedPulling="2025-12-09 15:12:59.419029366 +0000 UTC m=+877.626478454" lastFinishedPulling="2025-12-09 15:13:00.884763762 +0000 UTC m=+879.092212849" observedRunningTime="2025-12-09 15:13:01.4452217 +0000 UTC m=+879.652670779" watchObservedRunningTime="2025-12-09 15:13:01.448816149 +0000 UTC m=+879.656265237" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.632917 4583 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.634803 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.639623 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.650560 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.650678 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.650884 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjgll\" (UniqueName: \"kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.751346 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.751592 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.751718 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjgll\" (UniqueName: \"kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.751818 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.751974 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities\") pod \"certified-operators-zkj7k\" (UID: 
\"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.766673 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjgll\" (UniqueName: \"kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll\") pod \"certified-operators-zkj7k\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:04 crc kubenswrapper[4583]: I1209 15:13:04.946335 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:05 crc kubenswrapper[4583]: I1209 15:13:05.329680 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:05 crc kubenswrapper[4583]: W1209 15:13:05.331870 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cf32dea_23ec_4dcd_8c7f_6204267e1cca.slice/crio-e0d8cd035155fc76e7522b499715020b54b12439ed36c3b211d4aeb4e4d6c9f8 WatchSource:0}: Error finding container e0d8cd035155fc76e7522b499715020b54b12439ed36c3b211d4aeb4e4d6c9f8: Status 404 returned error can't find the container with id e0d8cd035155fc76e7522b499715020b54b12439ed36c3b211d4aeb4e4d6c9f8 Dec 09 15:13:05 crc kubenswrapper[4583]: I1209 15:13:05.447490 4583 generic.go:334] "Generic (PLEG): container finished" podID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerID="3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6" exitCode=0 Dec 09 15:13:05 crc kubenswrapper[4583]: I1209 15:13:05.447523 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerDied","Data":"3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6"} Dec 09 15:13:05 crc kubenswrapper[4583]: I1209 15:13:05.447676 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerStarted","Data":"e0d8cd035155fc76e7522b499715020b54b12439ed36c3b211d4aeb4e4d6c9f8"} Dec 09 15:13:07 crc kubenswrapper[4583]: I1209 15:13:07.458034 4583 generic.go:334] "Generic (PLEG): container finished" podID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerID="294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f" exitCode=0 Dec 09 15:13:07 crc kubenswrapper[4583]: I1209 15:13:07.458132 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerDied","Data":"294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f"} Dec 09 15:13:08 crc kubenswrapper[4583]: I1209 15:13:08.466787 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerStarted","Data":"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70"} Dec 09 15:13:08 crc kubenswrapper[4583]: I1209 15:13:08.480602 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zkj7k" podStartSLOduration=1.8900750469999998 podStartE2EDuration="4.480590579s" podCreationTimestamp="2025-12-09 15:13:04 +0000 UTC" firstStartedPulling="2025-12-09 
15:13:05.448654641 +0000 UTC m=+883.656103730" lastFinishedPulling="2025-12-09 15:13:08.039170174 +0000 UTC m=+886.246619262" observedRunningTime="2025-12-09 15:13:08.478699572 +0000 UTC m=+886.686148660" watchObservedRunningTime="2025-12-09 15:13:08.480590579 +0000 UTC m=+886.688039667" Dec 09 15:13:08 crc kubenswrapper[4583]: I1209 15:13:08.558091 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:08 crc kubenswrapper[4583]: I1209 15:13:08.558133 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:08 crc kubenswrapper[4583]: I1209 15:13:08.586902 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:09 crc kubenswrapper[4583]: I1209 15:13:09.500772 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:09 crc kubenswrapper[4583]: I1209 15:13:09.826930 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.480451 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rhprr" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="registry-server" containerID="cri-o://46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8" gracePeriod=2 Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.778116 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.933418 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities\") pod \"57edcc4d-d0ff-425c-bdeb-1920e1020967\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.933474 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzcxr\" (UniqueName: \"kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr\") pod \"57edcc4d-d0ff-425c-bdeb-1920e1020967\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.933574 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content\") pod \"57edcc4d-d0ff-425c-bdeb-1920e1020967\" (UID: \"57edcc4d-d0ff-425c-bdeb-1920e1020967\") " Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.934153 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities" (OuterVolumeSpecName: "utilities") pod "57edcc4d-d0ff-425c-bdeb-1920e1020967" (UID: "57edcc4d-d0ff-425c-bdeb-1920e1020967"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.937487 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr" (OuterVolumeSpecName: "kube-api-access-dzcxr") pod "57edcc4d-d0ff-425c-bdeb-1920e1020967" (UID: "57edcc4d-d0ff-425c-bdeb-1920e1020967"). InnerVolumeSpecName "kube-api-access-dzcxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:13:11 crc kubenswrapper[4583]: I1209 15:13:11.946794 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57edcc4d-d0ff-425c-bdeb-1920e1020967" (UID: "57edcc4d-d0ff-425c-bdeb-1920e1020967"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.034607 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.034633 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57edcc4d-d0ff-425c-bdeb-1920e1020967-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.034642 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzcxr\" (UniqueName: \"kubernetes.io/projected/57edcc4d-d0ff-425c-bdeb-1920e1020967-kube-api-access-dzcxr\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.486996 4583 generic.go:334] "Generic (PLEG): container finished" podID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerID="46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8" exitCode=0 Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.487031 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhprr" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.487034 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerDied","Data":"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8"} Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.487130 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhprr" event={"ID":"57edcc4d-d0ff-425c-bdeb-1920e1020967","Type":"ContainerDied","Data":"a413e0c9a3f7957df68d82c331cb018fb131c8e8d33d70ae68454cbb963e9c01"} Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.487148 4583 scope.go:117] "RemoveContainer" containerID="46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.500433 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.501500 4583 scope.go:117] "RemoveContainer" containerID="ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.504450 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhprr"] Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.511799 4583 scope.go:117] "RemoveContainer" containerID="299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.528287 4583 scope.go:117] "RemoveContainer" containerID="46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8" Dec 09 15:13:12 crc kubenswrapper[4583]: E1209 15:13:12.528531 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8\": container with ID starting with 46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8 not found: ID does not exist" containerID="46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.528563 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8"} err="failed to get container status \"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8\": rpc error: code = NotFound desc = could not find container \"46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8\": container with ID starting with 46c6c3854e65e0dd74a4f75b47937889ccdaf6b56ee3ce5af3f75ccf20da66c8 not found: ID does not exist" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.528586 4583 scope.go:117] "RemoveContainer" containerID="ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a" Dec 09 15:13:12 crc kubenswrapper[4583]: E1209 15:13:12.528825 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a\": container with ID starting with ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a not found: ID does not exist" containerID="ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.528862 4583 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a"} err="failed to get container status \"ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a\": rpc error: code = NotFound desc = could not find container \"ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a\": container with ID starting with ea3e254c0a17bec2cd4a861b474ba0dc05a551a7dccd070c4e2376f0e1736a7a not found: ID does not exist" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.528881 4583 scope.go:117] "RemoveContainer" containerID="299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322" Dec 09 15:13:12 crc kubenswrapper[4583]: E1209 15:13:12.529135 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322\": container with ID starting with 299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322 not found: ID does not exist" containerID="299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322" Dec 09 15:13:12 crc kubenswrapper[4583]: I1209 15:13:12.529207 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322"} err="failed to get container status \"299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322\": rpc error: code = NotFound desc = could not find container \"299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322\": container with ID starting with 299ba361cd24dcb1824a20e20f049240c575b2daebdf1cdb7fbf756f32b08322 not found: ID does not exist" Dec 09 15:13:14 crc kubenswrapper[4583]: I1209 15:13:14.085131 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" path="/var/lib/kubelet/pods/57edcc4d-d0ff-425c-bdeb-1920e1020967/volumes" Dec 09 15:13:14 crc kubenswrapper[4583]: I1209 15:13:14.946854 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:14 crc kubenswrapper[4583]: I1209 15:13:14.946888 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:14 crc kubenswrapper[4583]: I1209 15:13:14.973367 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:15 crc kubenswrapper[4583]: I1209 15:13:15.528050 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:16 crc kubenswrapper[4583]: I1209 15:13:16.227284 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.509805 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zkj7k" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="registry-server" containerID="cri-o://bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70" gracePeriod=2 Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.816442 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.909165 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjgll\" (UniqueName: \"kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll\") pod \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.909213 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities\") pod \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.909255 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content\") pod \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\" (UID: \"5cf32dea-23ec-4dcd-8c7f-6204267e1cca\") " Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.910061 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities" (OuterVolumeSpecName: "utilities") pod "5cf32dea-23ec-4dcd-8c7f-6204267e1cca" (UID: "5cf32dea-23ec-4dcd-8c7f-6204267e1cca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.913280 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll" (OuterVolumeSpecName: "kube-api-access-sjgll") pod "5cf32dea-23ec-4dcd-8c7f-6204267e1cca" (UID: "5cf32dea-23ec-4dcd-8c7f-6204267e1cca"). InnerVolumeSpecName "kube-api-access-sjgll". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:13:17 crc kubenswrapper[4583]: I1209 15:13:17.944332 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cf32dea-23ec-4dcd-8c7f-6204267e1cca" (UID: "5cf32dea-23ec-4dcd-8c7f-6204267e1cca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.010362 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjgll\" (UniqueName: \"kubernetes.io/projected/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-kube-api-access-sjgll\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.010388 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.010399 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf32dea-23ec-4dcd-8c7f-6204267e1cca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.515603 4583 generic.go:334] "Generic (PLEG): container finished" podID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerID="bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70" exitCode=0 Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.515648 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zkj7k" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.515663 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerDied","Data":"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70"} Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.515910 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zkj7k" event={"ID":"5cf32dea-23ec-4dcd-8c7f-6204267e1cca","Type":"ContainerDied","Data":"e0d8cd035155fc76e7522b499715020b54b12439ed36c3b211d4aeb4e4d6c9f8"} Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.515926 4583 scope.go:117] "RemoveContainer" containerID="bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.528440 4583 scope.go:117] "RemoveContainer" containerID="294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.531590 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.535503 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zkj7k"] Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.541266 4583 scope.go:117] "RemoveContainer" containerID="3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.556690 4583 scope.go:117] "RemoveContainer" containerID="bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70" Dec 09 15:13:18 crc kubenswrapper[4583]: E1209 15:13:18.557000 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70\": container with ID starting with bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70 not found: ID does not exist" containerID="bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.557026 
4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70"} err="failed to get container status \"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70\": rpc error: code = NotFound desc = could not find container \"bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70\": container with ID starting with bbaf8d2e185f5efc31493b3f93167b818d9da7ba3a35e07b6238ad76c61cdf70 not found: ID does not exist" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.557044 4583 scope.go:117] "RemoveContainer" containerID="294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f" Dec 09 15:13:18 crc kubenswrapper[4583]: E1209 15:13:18.557284 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f\": container with ID starting with 294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f not found: ID does not exist" containerID="294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.557308 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f"} err="failed to get container status \"294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f\": rpc error: code = NotFound desc = could not find container \"294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f\": container with ID starting with 294be2afaa89d2b6be02225217dd022447c3c0072d275427fd99680e4dafe92f not found: ID does not exist" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.557323 4583 scope.go:117] "RemoveContainer" containerID="3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6" Dec 09 15:13:18 crc kubenswrapper[4583]: E1209 15:13:18.557549 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6\": container with ID starting with 3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6 not found: ID does not exist" containerID="3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6" Dec 09 15:13:18 crc kubenswrapper[4583]: I1209 15:13:18.557569 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6"} err="failed to get container status \"3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6\": rpc error: code = NotFound desc = could not find container \"3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6\": container with ID starting with 3fa9e6b84b5c02e8bd25d480996d4d7112b8dacb5cae6d621aef0e568cb4eae6 not found: ID does not exist" Dec 09 15:13:20 crc kubenswrapper[4583]: I1209 15:13:20.084305 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" path="/var/lib/kubelet/pods/5cf32dea-23ec-4dcd-8c7f-6204267e1cca/volumes" Dec 09 15:13:29 crc kubenswrapper[4583]: I1209 15:13:29.391602 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:13:29 crc kubenswrapper[4583]: I1209 15:13:29.391990 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.470916 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471437 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471448 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471456 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="extract-content" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471463 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="extract-content" Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471471 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="extract-content" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471476 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="extract-content" Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471485 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471490 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471499 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="extract-utilities" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471505 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="extract-utilities" Dec 09 15:13:49 crc kubenswrapper[4583]: E1209 15:13:49.471514 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="extract-utilities" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471519 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="extract-utilities" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471616 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cf32dea-23ec-4dcd-8c7f-6204267e1cca" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.471624 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="57edcc4d-d0ff-425c-bdeb-1920e1020967" containerName="registry-server" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.472379 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.480626 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.581703 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.581972 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26k6r\" (UniqueName: \"kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.582056 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.683361 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26k6r\" (UniqueName: \"kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.683553 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.683692 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.684030 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.684047 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.713505 4583 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-26k6r\" (UniqueName: \"kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r\") pod \"community-operators-m5zl9\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:49 crc kubenswrapper[4583]: I1209 15:13:49.790929 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:50 crc kubenswrapper[4583]: I1209 15:13:50.180518 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:13:50 crc kubenswrapper[4583]: W1209 15:13:50.182947 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5769e572_1356_407d_bddb_dd1e33707c9c.slice/crio-c46ee3b9c1ad782bbbe61afcb460ddd3188f6a4dbfdac2534e2b7b37a959e0b6 WatchSource:0}: Error finding container c46ee3b9c1ad782bbbe61afcb460ddd3188f6a4dbfdac2534e2b7b37a959e0b6: Status 404 returned error can't find the container with id c46ee3b9c1ad782bbbe61afcb460ddd3188f6a4dbfdac2534e2b7b37a959e0b6 Dec 09 15:13:50 crc kubenswrapper[4583]: I1209 15:13:50.664921 4583 generic.go:334] "Generic (PLEG): container finished" podID="5769e572-1356-407d-bddb-dd1e33707c9c" containerID="e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef" exitCode=0 Dec 09 15:13:50 crc kubenswrapper[4583]: I1209 15:13:50.664977 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerDied","Data":"e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef"} Dec 09 15:13:50 crc kubenswrapper[4583]: I1209 15:13:50.665135 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerStarted","Data":"c46ee3b9c1ad782bbbe61afcb460ddd3188f6a4dbfdac2534e2b7b37a959e0b6"} Dec 09 15:13:51 crc kubenswrapper[4583]: I1209 15:13:51.671737 4583 generic.go:334] "Generic (PLEG): container finished" podID="5769e572-1356-407d-bddb-dd1e33707c9c" containerID="fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f" exitCode=0 Dec 09 15:13:51 crc kubenswrapper[4583]: I1209 15:13:51.671792 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerDied","Data":"fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f"} Dec 09 15:13:52 crc kubenswrapper[4583]: I1209 15:13:52.677556 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerStarted","Data":"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f"} Dec 09 15:13:52 crc kubenswrapper[4583]: I1209 15:13:52.690367 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m5zl9" podStartSLOduration=2.173195963 podStartE2EDuration="3.690354007s" podCreationTimestamp="2025-12-09 15:13:49 +0000 UTC" firstStartedPulling="2025-12-09 15:13:50.666280279 +0000 UTC m=+928.873729377" lastFinishedPulling="2025-12-09 15:13:52.183438333 +0000 UTC m=+930.390887421" observedRunningTime="2025-12-09 15:13:52.688486704 +0000 UTC 
m=+930.895935792" watchObservedRunningTime="2025-12-09 15:13:52.690354007 +0000 UTC m=+930.897803094" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.391290 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.391525 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.391568 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.392191 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.392240 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5" gracePeriod=600 Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.710392 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5" exitCode=0 Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.710455 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5"} Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.710634 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8"} Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.710654 4583 scope.go:117] "RemoveContainer" containerID="2b71e264e7c07931c21a874e87d0e92983343bc552a933cf51a341ffed03af7a" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.791950 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.791986 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:13:59 crc kubenswrapper[4583]: I1209 15:13:59.821679 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:14:00 crc kubenswrapper[4583]: I1209 15:14:00.745918 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:14:00 crc kubenswrapper[4583]: I1209 15:14:00.777396 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:14:02 crc kubenswrapper[4583]: I1209 15:14:02.727765 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m5zl9" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="registry-server" containerID="cri-o://987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f" gracePeriod=2 Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.072665 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.232334 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content\") pod \"5769e572-1356-407d-bddb-dd1e33707c9c\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.232469 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities\") pod \"5769e572-1356-407d-bddb-dd1e33707c9c\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.232507 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26k6r\" (UniqueName: \"kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r\") pod \"5769e572-1356-407d-bddb-dd1e33707c9c\" (UID: \"5769e572-1356-407d-bddb-dd1e33707c9c\") " Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.233246 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities" (OuterVolumeSpecName: "utilities") pod "5769e572-1356-407d-bddb-dd1e33707c9c" (UID: "5769e572-1356-407d-bddb-dd1e33707c9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.244177 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r" (OuterVolumeSpecName: "kube-api-access-26k6r") pod "5769e572-1356-407d-bddb-dd1e33707c9c" (UID: "5769e572-1356-407d-bddb-dd1e33707c9c"). InnerVolumeSpecName "kube-api-access-26k6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.269536 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5769e572-1356-407d-bddb-dd1e33707c9c" (UID: "5769e572-1356-407d-bddb-dd1e33707c9c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.333829 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.333855 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5769e572-1356-407d-bddb-dd1e33707c9c-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.333865 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26k6r\" (UniqueName: \"kubernetes.io/projected/5769e572-1356-407d-bddb-dd1e33707c9c-kube-api-access-26k6r\") on node \"crc\" DevicePath \"\"" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.734326 4583 generic.go:334] "Generic (PLEG): container finished" podID="5769e572-1356-407d-bddb-dd1e33707c9c" containerID="987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f" exitCode=0 Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.734368 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m5zl9" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.734382 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerDied","Data":"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f"} Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.734665 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m5zl9" event={"ID":"5769e572-1356-407d-bddb-dd1e33707c9c","Type":"ContainerDied","Data":"c46ee3b9c1ad782bbbe61afcb460ddd3188f6a4dbfdac2534e2b7b37a959e0b6"} Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.734682 4583 scope.go:117] "RemoveContainer" containerID="987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.748545 4583 scope.go:117] "RemoveContainer" containerID="fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.755130 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.759267 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m5zl9"] Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.779915 4583 scope.go:117] "RemoveContainer" containerID="e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.793939 4583 scope.go:117] "RemoveContainer" containerID="987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f" Dec 09 15:14:03 crc kubenswrapper[4583]: E1209 15:14:03.794275 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f\": container with ID starting with 987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f not found: ID does not exist" containerID="987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.794314 
4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f"} err="failed to get container status \"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f\": rpc error: code = NotFound desc = could not find container \"987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f\": container with ID starting with 987c07f8f3c13275d43ec2e36b3a34c81d2380aefcf91546b770c13a61c7b41f not found: ID does not exist" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.794340 4583 scope.go:117] "RemoveContainer" containerID="fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f" Dec 09 15:14:03 crc kubenswrapper[4583]: E1209 15:14:03.794626 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f\": container with ID starting with fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f not found: ID does not exist" containerID="fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.794654 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f"} err="failed to get container status \"fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f\": rpc error: code = NotFound desc = could not find container \"fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f\": container with ID starting with fba1dd1101ae3ec5f8f0ab283b2869c653c15341e9030ff635c92c0a784fed0f not found: ID does not exist" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.794673 4583 scope.go:117] "RemoveContainer" containerID="e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef" Dec 09 15:14:03 crc kubenswrapper[4583]: E1209 15:14:03.794965 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef\": container with ID starting with e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef not found: ID does not exist" containerID="e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef" Dec 09 15:14:03 crc kubenswrapper[4583]: I1209 15:14:03.794994 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef"} err="failed to get container status \"e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef\": rpc error: code = NotFound desc = could not find container \"e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef\": container with ID starting with e27fb9882ff917de4d389822736ae5c9039dc527a735edafad2e6edcf611d8ef not found: ID does not exist" Dec 09 15:14:04 crc kubenswrapper[4583]: I1209 15:14:04.084500 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" path="/var/lib/kubelet/pods/5769e572-1356-407d-bddb-dd1e33707c9c/volumes" Dec 09 15:14:22 crc kubenswrapper[4583]: E1209 15:14:22.674980 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: 
pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:14:22 crc kubenswrapper[4583]: E1209 15:14:22.675369 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:14:22 crc kubenswrapper[4583]: E1209 15:14:22.675512 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fj8xh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-5fr5p_openstack-operators(2ab400fe-20b5-4d42-92bc-e53d9454656e): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:14:22 crc kubenswrapper[4583]: E1209 15:14:22.676648 4583 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-5fr5p" podUID="2ab400fe-20b5-4d42-92bc-e53d9454656e" Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.011622 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.153995 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj8xh\" (UniqueName: \"kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh\") pod \"2ab400fe-20b5-4d42-92bc-e53d9454656e\" (UID: \"2ab400fe-20b5-4d42-92bc-e53d9454656e\") " Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.157957 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh" (OuterVolumeSpecName: "kube-api-access-fj8xh") pod "2ab400fe-20b5-4d42-92bc-e53d9454656e" (UID: "2ab400fe-20b5-4d42-92bc-e53d9454656e"). InnerVolumeSpecName "kube-api-access-fj8xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.255731 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj8xh\" (UniqueName: \"kubernetes.io/projected/2ab400fe-20b5-4d42-92bc-e53d9454656e-kube-api-access-fj8xh\") on node \"crc\" DevicePath \"\"" Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.822558 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5fr5p" event={"ID":"2ab400fe-20b5-4d42-92bc-e53d9454656e","Type":"ContainerDied","Data":"236564a62da58ce3940d642929a03a884779d4e4308cace1b946167c6ac16278"} Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.823144 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5fr5p" Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.852368 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:14:23 crc kubenswrapper[4583]: I1209 15:14:23.857579 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-5fr5p"] Dec 09 15:14:24 crc kubenswrapper[4583]: I1209 15:14:24.083816 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ab400fe-20b5-4d42-92bc-e53d9454656e" path="/var/lib/kubelet/pods/2ab400fe-20b5-4d42-92bc-e53d9454656e/volumes" Dec 09 15:14:26 crc kubenswrapper[4583]: E1209 15:14:26.039756 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:14:26 crc kubenswrapper[4583]: E1209 15:14:26.039990 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:14:26 crc kubenswrapper[4583]: E1209 15:14:26.040105 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:14:26 crc kubenswrapper[4583]: E1209 15:14:26.041329 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:14:26 crc kubenswrapper[4583]: E1209 15:14:26.836505 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.134116 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp"] Dec 09 15:15:00 crc kubenswrapper[4583]: E1209 15:15:00.134656 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="registry-server" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.134667 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="registry-server" Dec 09 15:15:00 crc kubenswrapper[4583]: E1209 15:15:00.134678 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="extract-utilities" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.134684 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="extract-utilities" Dec 09 15:15:00 crc 
kubenswrapper[4583]: E1209 15:15:00.134696 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="extract-content" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.134701 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="extract-content" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.134816 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="5769e572-1356-407d-bddb-dd1e33707c9c" containerName="registry-server" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.135200 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.136447 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.137310 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.140177 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp"] Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.206889 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.207125 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.207165 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q22cv\" (UniqueName: \"kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.308129 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q22cv\" (UniqueName: \"kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.308195 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.308259 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.309111 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.312269 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.321771 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q22cv\" (UniqueName: \"kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv\") pod \"collect-profiles-29421555-8mlgp\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.448584 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.779863 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp"] Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.987139 4583 generic.go:334] "Generic (PLEG): container finished" podID="b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" containerID="3906ed97844a4d1a12ab1dbd498985390372e0dbdfaebe1d253793159c6e9cdf" exitCode=0 Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.987187 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" event={"ID":"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9","Type":"ContainerDied","Data":"3906ed97844a4d1a12ab1dbd498985390372e0dbdfaebe1d253793159c6e9cdf"} Dec 09 15:15:00 crc kubenswrapper[4583]: I1209 15:15:00.987223 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" event={"ID":"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9","Type":"ContainerStarted","Data":"2bf5a5c5778390e2bb5d47d16c04bab337f3f9f3e28d16a2ce9e17839da77d78"} Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.185202 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.237484 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume\") pod \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.237527 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q22cv\" (UniqueName: \"kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv\") pod \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.237711 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume\") pod \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\" (UID: \"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9\") " Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.238130 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume" (OuterVolumeSpecName: "config-volume") pod "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" (UID: "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.242275 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" (UID: "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.242299 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv" (OuterVolumeSpecName: "kube-api-access-q22cv") pod "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" (UID: "b6c3f10f-4dd1-40ea-ae90-db76fff7dae9"). InnerVolumeSpecName "kube-api-access-q22cv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.339542 4583 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.339573 4583 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.339583 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q22cv\" (UniqueName: \"kubernetes.io/projected/b6c3f10f-4dd1-40ea-ae90-db76fff7dae9-kube-api-access-q22cv\") on node \"crc\" DevicePath \"\"" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.996265 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" event={"ID":"b6c3f10f-4dd1-40ea-ae90-db76fff7dae9","Type":"ContainerDied","Data":"2bf5a5c5778390e2bb5d47d16c04bab337f3f9f3e28d16a2ce9e17839da77d78"} Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.996445 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bf5a5c5778390e2bb5d47d16c04bab337f3f9f3e28d16a2ce9e17839da77d78" Dec 09 15:15:02 crc kubenswrapper[4583]: I1209 15:15:02.996472 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421555-8mlgp" Dec 09 15:15:59 crc kubenswrapper[4583]: I1209 15:15:59.391673 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:15:59 crc kubenswrapper[4583]: I1209 15:15:59.392127 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:16:29 crc kubenswrapper[4583]: I1209 15:16:29.391524 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:16:29 crc kubenswrapper[4583]: I1209 15:16:29.392106 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:16:38 crc kubenswrapper[4583]: E1209 15:16:38.085835 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o 
timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:16:38 crc kubenswrapper[4583]: E1209 15:16:38.086126 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:16:38 crc kubenswrapper[4583]: E1209 15:16:38.086283 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:16:38 crc kubenswrapper[4583]: E1209 15:16:38.087495 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = 
initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:16:53 crc kubenswrapper[4583]: E1209 15:16:53.080761 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.392106 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.392368 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.392408 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.392861 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.392908 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8" gracePeriod=600 Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.545604 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8" exitCode=0 Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.545703 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8"} Dec 09 15:16:59 crc kubenswrapper[4583]: I1209 15:16:59.545898 4583 scope.go:117] "RemoveContainer" containerID="9a6621cc1017866814b05715a584edc254f99c224f8d97253065024c4456b9e5" Dec 09 15:17:00 crc kubenswrapper[4583]: I1209 15:17:00.552500 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" 
event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e"} Dec 09 15:18:59 crc kubenswrapper[4583]: I1209 15:18:59.391910 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:18:59 crc kubenswrapper[4583]: I1209 15:18:59.392271 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:19:04 crc kubenswrapper[4583]: E1209 15:19:04.086005 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:19:04 crc kubenswrapper[4583]: E1209 15:19:04.086212 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:19:04 crc kubenswrapper[4583]: E1209 15:19:04.086320 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:19:04 crc kubenswrapper[4583]: E1209 15:19:04.087477 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:19:16 crc kubenswrapper[4583]: E1209 15:19:16.080604 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:19:27 crc kubenswrapper[4583]: E1209 15:19:27.080358 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:19:29 crc kubenswrapper[4583]: I1209 15:19:29.392428 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:19:29 crc kubenswrapper[4583]: I1209 15:19:29.392675 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" 
podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:19:42 crc kubenswrapper[4583]: E1209 15:19:42.082432 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:19:56 crc kubenswrapper[4583]: I1209 15:19:56.079900 4583 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 09 15:19:59 crc kubenswrapper[4583]: I1209 15:19:59.391195 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:19:59 crc kubenswrapper[4583]: I1209 15:19:59.391418 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:19:59 crc kubenswrapper[4583]: I1209 15:19:59.391455 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:19:59 crc kubenswrapper[4583]: I1209 15:19:59.391920 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:19:59 crc kubenswrapper[4583]: I1209 15:19:59.391967 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e" gracePeriod=600 Dec 09 15:20:00 crc kubenswrapper[4583]: I1209 15:20:00.378394 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e" exitCode=0 Dec 09 15:20:00 crc kubenswrapper[4583]: I1209 15:20:00.378694 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e"} Dec 09 15:20:00 crc kubenswrapper[4583]: I1209 15:20:00.378735 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26"} Dec 09 15:20:00 crc 
kubenswrapper[4583]: I1209 15:20:00.378770 4583 scope.go:117] "RemoveContainer" containerID="a8504e920ea8bfb1acadd6f0f401e3aaf03586d01651d76970937dd83e58bba8" Dec 09 15:21:56 crc kubenswrapper[4583]: E1209 15:21:56.083267 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:21:56 crc kubenswrapper[4583]: E1209 15:21:56.083644 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:21:56 crc kubenswrapper[4583]: E1209 15:21:56.083792 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): 
ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:21:56 crc kubenswrapper[4583]: E1209 15:21:56.084970 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:21:59 crc kubenswrapper[4583]: I1209 15:21:59.391964 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:21:59 crc kubenswrapper[4583]: I1209 15:21:59.392187 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:22:11 crc kubenswrapper[4583]: E1209 15:22:11.079983 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:22:25 crc kubenswrapper[4583]: E1209 15:22:25.080696 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:22:29 crc kubenswrapper[4583]: I1209 15:22:29.391257 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:22:29 crc kubenswrapper[4583]: I1209 15:22:29.392083 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:22:31 crc kubenswrapper[4583]: I1209 15:22:31.920045 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:31 crc kubenswrapper[4583]: E1209 
15:22:31.920452 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" containerName="collect-profiles" Dec 09 15:22:31 crc kubenswrapper[4583]: I1209 15:22:31.920464 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" containerName="collect-profiles" Dec 09 15:22:31 crc kubenswrapper[4583]: I1209 15:22:31.920603 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6c3f10f-4dd1-40ea-ae90-db76fff7dae9" containerName="collect-profiles" Dec 09 15:22:31 crc kubenswrapper[4583]: I1209 15:22:31.921366 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:31 crc kubenswrapper[4583]: I1209 15:22:31.927881 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.075726 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2cww\" (UniqueName: \"kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.075798 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.075878 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.176771 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.177127 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2cww\" (UniqueName: \"kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.177221 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.177300 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.177590 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.191841 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2cww\" (UniqueName: \"kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww\") pod \"redhat-operators-w6q6w\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.233648 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:32 crc kubenswrapper[4583]: I1209 15:22:32.595447 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:33 crc kubenswrapper[4583]: I1209 15:22:33.049790 4583 generic.go:334] "Generic (PLEG): container finished" podID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerID="f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6" exitCode=0 Dec 09 15:22:33 crc kubenswrapper[4583]: I1209 15:22:33.049824 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerDied","Data":"f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6"} Dec 09 15:22:33 crc kubenswrapper[4583]: I1209 15:22:33.049972 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerStarted","Data":"706873286cee2f898833938c3a77e1f48a2b1404a03046a4a756971777e893e3"} Dec 09 15:22:34 crc kubenswrapper[4583]: I1209 15:22:34.056606 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerStarted","Data":"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db"} Dec 09 15:22:35 crc kubenswrapper[4583]: I1209 15:22:35.064328 4583 generic.go:334] "Generic (PLEG): container finished" podID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerID="95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db" exitCode=0 Dec 09 15:22:35 crc kubenswrapper[4583]: I1209 15:22:35.064410 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerDied","Data":"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db"} Dec 09 15:22:36 crc kubenswrapper[4583]: I1209 15:22:36.075178 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerStarted","Data":"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1"} Dec 09 15:22:36 crc kubenswrapper[4583]: I1209 15:22:36.092596 4583 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w6q6w" podStartSLOduration=2.547503653 podStartE2EDuration="5.092580603s" podCreationTimestamp="2025-12-09 15:22:31 +0000 UTC" firstStartedPulling="2025-12-09 15:22:33.051123105 +0000 UTC m=+1451.258572192" lastFinishedPulling="2025-12-09 15:22:35.596200054 +0000 UTC m=+1453.803649142" observedRunningTime="2025-12-09 15:22:36.088115362 +0000 UTC m=+1454.295564449" watchObservedRunningTime="2025-12-09 15:22:36.092580603 +0000 UTC m=+1454.300029691" Dec 09 15:22:38 crc kubenswrapper[4583]: E1209 15:22:38.079762 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:22:42 crc kubenswrapper[4583]: I1209 15:22:42.234345 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:42 crc kubenswrapper[4583]: I1209 15:22:42.234543 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:42 crc kubenswrapper[4583]: I1209 15:22:42.262872 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:43 crc kubenswrapper[4583]: I1209 15:22:43.134421 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:43 crc kubenswrapper[4583]: I1209 15:22:43.162865 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:45 crc kubenswrapper[4583]: I1209 15:22:45.113586 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w6q6w" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="registry-server" containerID="cri-o://1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1" gracePeriod=2 Dec 09 15:22:45 crc kubenswrapper[4583]: I1209 15:22:45.913277 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.036990 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content\") pod \"3298ab68-f22b-48bd-8db3-bf96503ead7f\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.037060 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2cww\" (UniqueName: \"kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww\") pod \"3298ab68-f22b-48bd-8db3-bf96503ead7f\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.037112 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities\") pod \"3298ab68-f22b-48bd-8db3-bf96503ead7f\" (UID: \"3298ab68-f22b-48bd-8db3-bf96503ead7f\") " Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.037804 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities" (OuterVolumeSpecName: "utilities") pod "3298ab68-f22b-48bd-8db3-bf96503ead7f" (UID: "3298ab68-f22b-48bd-8db3-bf96503ead7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.038241 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.041817 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww" (OuterVolumeSpecName: "kube-api-access-q2cww") pod "3298ab68-f22b-48bd-8db3-bf96503ead7f" (UID: "3298ab68-f22b-48bd-8db3-bf96503ead7f"). InnerVolumeSpecName "kube-api-access-q2cww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.114071 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3298ab68-f22b-48bd-8db3-bf96503ead7f" (UID: "3298ab68-f22b-48bd-8db3-bf96503ead7f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.118773 4583 generic.go:334] "Generic (PLEG): container finished" podID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerID="1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1" exitCode=0 Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.118810 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerDied","Data":"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1"} Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.118815 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w6q6w" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.118839 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6q6w" event={"ID":"3298ab68-f22b-48bd-8db3-bf96503ead7f","Type":"ContainerDied","Data":"706873286cee2f898833938c3a77e1f48a2b1404a03046a4a756971777e893e3"} Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.118855 4583 scope.go:117] "RemoveContainer" containerID="1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.132302 4583 scope.go:117] "RemoveContainer" containerID="95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.139079 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3298ab68-f22b-48bd-8db3-bf96503ead7f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.139099 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2cww\" (UniqueName: \"kubernetes.io/projected/3298ab68-f22b-48bd-8db3-bf96503ead7f-kube-api-access-q2cww\") on node \"crc\" DevicePath \"\"" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.142554 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.146971 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w6q6w"] Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.158013 4583 scope.go:117] "RemoveContainer" containerID="f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.170803 4583 scope.go:117] "RemoveContainer" containerID="1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1" Dec 09 15:22:46 crc kubenswrapper[4583]: E1209 15:22:46.171169 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1\": container with ID starting with 1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1 not found: ID does not exist" containerID="1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.171206 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1"} err="failed to get container status \"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1\": rpc error: code = NotFound desc = could not find container \"1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1\": container with ID starting with 1fb4021ec48268f9de086cbc22297d9e77ab466947443429ca608a83faff78b1 not found: ID does not exist" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.171229 4583 scope.go:117] "RemoveContainer" containerID="95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db" Dec 09 15:22:46 crc kubenswrapper[4583]: E1209 15:22:46.171494 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db\": container with ID starting with 
95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db not found: ID does not exist" containerID="95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.171521 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db"} err="failed to get container status \"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db\": rpc error: code = NotFound desc = could not find container \"95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db\": container with ID starting with 95c81a4610564515706cf97afc79231403a22c41daa7e8df2a9eb48dd9a365db not found: ID does not exist" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.171541 4583 scope.go:117] "RemoveContainer" containerID="f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6" Dec 09 15:22:46 crc kubenswrapper[4583]: E1209 15:22:46.171792 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6\": container with ID starting with f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6 not found: ID does not exist" containerID="f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6" Dec 09 15:22:46 crc kubenswrapper[4583]: I1209 15:22:46.171811 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6"} err="failed to get container status \"f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6\": rpc error: code = NotFound desc = could not find container \"f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6\": container with ID starting with f9d1e459c6e5fd899e1b5553b788c8c7548973f4cfc569fb8d2471c8e05f59a6 not found: ID does not exist" Dec 09 15:22:48 crc kubenswrapper[4583]: I1209 15:22:48.090235 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" path="/var/lib/kubelet/pods/3298ab68-f22b-48bd-8db3-bf96503ead7f/volumes" Dec 09 15:22:52 crc kubenswrapper[4583]: E1209 15:22:52.082459 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:22:59 crc kubenswrapper[4583]: I1209 15:22:59.391891 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:22:59 crc kubenswrapper[4583]: I1209 15:22:59.392291 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:22:59 crc kubenswrapper[4583]: I1209 15:22:59.392333 4583 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:22:59 crc kubenswrapper[4583]: I1209 15:22:59.392883 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:22:59 crc kubenswrapper[4583]: I1209 15:22:59.392933 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" gracePeriod=600 Dec 09 15:22:59 crc kubenswrapper[4583]: E1209 15:22:59.506613 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:23:00 crc kubenswrapper[4583]: I1209 15:23:00.182631 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" exitCode=0 Dec 09 15:23:00 crc kubenswrapper[4583]: I1209 15:23:00.182667 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26"} Dec 09 15:23:00 crc kubenswrapper[4583]: I1209 15:23:00.182696 4583 scope.go:117] "RemoveContainer" containerID="d216cad3d58bccacf2a778fd54aa29ca8a3dcc9642372f1a3c27a36c3ae50c5e" Dec 09 15:23:00 crc kubenswrapper[4583]: I1209 15:23:00.183142 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:23:00 crc kubenswrapper[4583]: E1209 15:23:00.183327 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:23:04 crc kubenswrapper[4583]: E1209 15:23:04.079991 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:23:13 crc kubenswrapper[4583]: I1209 15:23:13.078811 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:23:13 crc kubenswrapper[4583]: E1209 15:23:13.079310 
4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:23:16 crc kubenswrapper[4583]: E1209 15:23:16.080447 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:23:28 crc kubenswrapper[4583]: I1209 15:23:28.078812 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:23:28 crc kubenswrapper[4583]: E1209 15:23:28.079350 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:23:40 crc kubenswrapper[4583]: I1209 15:23:40.079041 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:23:40 crc kubenswrapper[4583]: E1209 15:23:40.080123 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:23:53 crc kubenswrapper[4583]: I1209 15:23:53.078288 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:23:53 crc kubenswrapper[4583]: E1209 15:23:53.078842 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.618398 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:02 crc kubenswrapper[4583]: E1209 15:24:02.618960 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="extract-content" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.618973 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="extract-content" Dec 09 15:24:02 crc kubenswrapper[4583]: E1209 15:24:02.618986 4583 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="extract-utilities" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.618991 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="extract-utilities" Dec 09 15:24:02 crc kubenswrapper[4583]: E1209 15:24:02.619000 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="registry-server" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.619006 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="registry-server" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.619117 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="3298ab68-f22b-48bd-8db3-bf96503ead7f" containerName="registry-server" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.619912 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.624893 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.716456 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpfxf\" (UniqueName: \"kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.716497 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.716520 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.817960 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpfxf\" (UniqueName: \"kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.818007 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.818030 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.818448 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.818520 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.833976 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpfxf\" (UniqueName: \"kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf\") pod \"community-operators-z76nt\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:02 crc kubenswrapper[4583]: I1209 15:24:02.936298 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:03 crc kubenswrapper[4583]: I1209 15:24:03.274617 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:03 crc kubenswrapper[4583]: I1209 15:24:03.489304 4583 generic.go:334] "Generic (PLEG): container finished" podID="25617e0a-b196-4d56-89f7-0278522804db" containerID="67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5" exitCode=0 Dec 09 15:24:03 crc kubenswrapper[4583]: I1209 15:24:03.489362 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerDied","Data":"67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5"} Dec 09 15:24:03 crc kubenswrapper[4583]: I1209 15:24:03.489530 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerStarted","Data":"f7d106b325478356ae59357ddd5ae7d6307cf323220ad0fafca4767acd1bfc05"} Dec 09 15:24:04 crc kubenswrapper[4583]: I1209 15:24:04.495511 4583 generic.go:334] "Generic (PLEG): container finished" podID="25617e0a-b196-4d56-89f7-0278522804db" containerID="729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c" exitCode=0 Dec 09 15:24:04 crc kubenswrapper[4583]: I1209 15:24:04.495551 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerDied","Data":"729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c"} Dec 09 15:24:05 crc kubenswrapper[4583]: I1209 15:24:05.078411 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:24:05 crc kubenswrapper[4583]: E1209 15:24:05.078795 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:24:05 crc kubenswrapper[4583]: I1209 15:24:05.502955 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerStarted","Data":"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3"} Dec 09 15:24:05 crc kubenswrapper[4583]: I1209 15:24:05.515309 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z76nt" podStartSLOduration=2.027254472 podStartE2EDuration="3.515295498s" podCreationTimestamp="2025-12-09 15:24:02 +0000 UTC" firstStartedPulling="2025-12-09 15:24:03.490960043 +0000 UTC m=+1541.698409131" lastFinishedPulling="2025-12-09 15:24:04.979001069 +0000 UTC m=+1543.186450157" observedRunningTime="2025-12-09 15:24:05.514479802 +0000 UTC m=+1543.721928890" watchObservedRunningTime="2025-12-09 15:24:05.515295498 +0000 UTC m=+1543.722744586" Dec 09 15:24:12 crc kubenswrapper[4583]: I1209 15:24:12.936704 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:12 crc kubenswrapper[4583]: I1209 15:24:12.936966 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:12 crc kubenswrapper[4583]: I1209 15:24:12.966487 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:13 crc kubenswrapper[4583]: I1209 15:24:13.566967 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:13 crc kubenswrapper[4583]: I1209 15:24:13.599213 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:15 crc kubenswrapper[4583]: I1209 15:24:15.550254 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z76nt" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="registry-server" containerID="cri-o://46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3" gracePeriod=2 Dec 09 15:24:15 crc kubenswrapper[4583]: I1209 15:24:15.891547 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.067149 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content\") pod \"25617e0a-b196-4d56-89f7-0278522804db\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.067230 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities\") pod \"25617e0a-b196-4d56-89f7-0278522804db\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.067266 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpfxf\" (UniqueName: \"kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf\") pod \"25617e0a-b196-4d56-89f7-0278522804db\" (UID: \"25617e0a-b196-4d56-89f7-0278522804db\") " Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.067908 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities" (OuterVolumeSpecName: "utilities") pod "25617e0a-b196-4d56-89f7-0278522804db" (UID: "25617e0a-b196-4d56-89f7-0278522804db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.071884 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf" (OuterVolumeSpecName: "kube-api-access-zpfxf") pod "25617e0a-b196-4d56-89f7-0278522804db" (UID: "25617e0a-b196-4d56-89f7-0278522804db"). InnerVolumeSpecName "kube-api-access-zpfxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.102322 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25617e0a-b196-4d56-89f7-0278522804db" (UID: "25617e0a-b196-4d56-89f7-0278522804db"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.168200 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.168225 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpfxf\" (UniqueName: \"kubernetes.io/projected/25617e0a-b196-4d56-89f7-0278522804db-kube-api-access-zpfxf\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.168236 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25617e0a-b196-4d56-89f7-0278522804db-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.556399 4583 generic.go:334] "Generic (PLEG): container finished" podID="25617e0a-b196-4d56-89f7-0278522804db" containerID="46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3" exitCode=0 Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.556459 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z76nt" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.556989 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerDied","Data":"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3"} Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.557037 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z76nt" event={"ID":"25617e0a-b196-4d56-89f7-0278522804db","Type":"ContainerDied","Data":"f7d106b325478356ae59357ddd5ae7d6307cf323220ad0fafca4767acd1bfc05"} Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.557057 4583 scope.go:117] "RemoveContainer" containerID="46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.570517 4583 scope.go:117] "RemoveContainer" containerID="729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.576727 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.583086 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z76nt"] Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.597338 4583 scope.go:117] "RemoveContainer" containerID="67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.609622 4583 scope.go:117] "RemoveContainer" containerID="46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3" Dec 09 15:24:16 crc kubenswrapper[4583]: E1209 15:24:16.610040 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3\": container with ID starting with 46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3 not found: ID does not exist" containerID="46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.610068 
4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3"} err="failed to get container status \"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3\": rpc error: code = NotFound desc = could not find container \"46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3\": container with ID starting with 46f7057e260998b240954b3240a490437e6adb07e91fa0ce9a5114435a3f24a3 not found: ID does not exist" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.610105 4583 scope.go:117] "RemoveContainer" containerID="729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c" Dec 09 15:24:16 crc kubenswrapper[4583]: E1209 15:24:16.610806 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c\": container with ID starting with 729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c not found: ID does not exist" containerID="729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.610831 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c"} err="failed to get container status \"729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c\": rpc error: code = NotFound desc = could not find container \"729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c\": container with ID starting with 729d136b5dcca9fad8b4faffb519b5e59131a1785c6808886773bc461fe7eb1c not found: ID does not exist" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.610847 4583 scope.go:117] "RemoveContainer" containerID="67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5" Dec 09 15:24:16 crc kubenswrapper[4583]: E1209 15:24:16.611138 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5\": container with ID starting with 67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5 not found: ID does not exist" containerID="67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5" Dec 09 15:24:16 crc kubenswrapper[4583]: I1209 15:24:16.611175 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5"} err="failed to get container status \"67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5\": rpc error: code = NotFound desc = could not find container \"67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5\": container with ID starting with 67307ff9bd167458d396386cc25f62a792a3a45e74f377aee513b01e3db54da5 not found: ID does not exist" Dec 09 15:24:18 crc kubenswrapper[4583]: I1209 15:24:18.084766 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25617e0a-b196-4d56-89f7-0278522804db" path="/var/lib/kubelet/pods/25617e0a-b196-4d56-89f7-0278522804db/volumes" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.078725 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:24:19 crc kubenswrapper[4583]: E1209 15:24:19.078950 4583 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.988290 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:19 crc kubenswrapper[4583]: E1209 15:24:19.988497 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="registry-server" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.988509 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="registry-server" Dec 09 15:24:19 crc kubenswrapper[4583]: E1209 15:24:19.988528 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="extract-content" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.988534 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="extract-content" Dec 09 15:24:19 crc kubenswrapper[4583]: E1209 15:24:19.988546 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="extract-utilities" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.988551 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="extract-utilities" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.988671 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="25617e0a-b196-4d56-89f7-0278522804db" containerName="registry-server" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.989436 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:19 crc kubenswrapper[4583]: I1209 15:24:19.996476 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.011486 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.011568 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.011601 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9ggx\" (UniqueName: \"kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.112489 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.112577 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.112609 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9ggx\" (UniqueName: \"kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.112960 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.113047 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.128198 4583 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-z9ggx\" (UniqueName: \"kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx\") pod \"redhat-marketplace-vcbvr\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.303210 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:20 crc kubenswrapper[4583]: I1209 15:24:20.649803 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:20 crc kubenswrapper[4583]: W1209 15:24:20.653643 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69f6c6db_4797_4dff_abfa_de0a90114d2e.slice/crio-7eb648ebad5ea10cebaad8e7f91c06183c95db8b35c2cca8d9142674a02284e3 WatchSource:0}: Error finding container 7eb648ebad5ea10cebaad8e7f91c06183c95db8b35c2cca8d9142674a02284e3: Status 404 returned error can't find the container with id 7eb648ebad5ea10cebaad8e7f91c06183c95db8b35c2cca8d9142674a02284e3 Dec 09 15:24:21 crc kubenswrapper[4583]: I1209 15:24:21.582517 4583 generic.go:334] "Generic (PLEG): container finished" podID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerID="04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f" exitCode=0 Dec 09 15:24:21 crc kubenswrapper[4583]: I1209 15:24:21.582571 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerDied","Data":"04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f"} Dec 09 15:24:21 crc kubenswrapper[4583]: I1209 15:24:21.582728 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerStarted","Data":"7eb648ebad5ea10cebaad8e7f91c06183c95db8b35c2cca8d9142674a02284e3"} Dec 09 15:24:22 crc kubenswrapper[4583]: I1209 15:24:22.589511 4583 generic.go:334] "Generic (PLEG): container finished" podID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerID="4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6" exitCode=0 Dec 09 15:24:22 crc kubenswrapper[4583]: I1209 15:24:22.589628 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerDied","Data":"4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6"} Dec 09 15:24:23 crc kubenswrapper[4583]: I1209 15:24:23.596443 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerStarted","Data":"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9"} Dec 09 15:24:23 crc kubenswrapper[4583]: I1209 15:24:23.611847 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vcbvr" podStartSLOduration=3.107621834 podStartE2EDuration="4.611834047s" podCreationTimestamp="2025-12-09 15:24:19 +0000 UTC" firstStartedPulling="2025-12-09 15:24:21.583796567 +0000 UTC m=+1559.791245655" lastFinishedPulling="2025-12-09 15:24:23.08800878 +0000 UTC m=+1561.295457868" observedRunningTime="2025-12-09 15:24:23.607724464 +0000 UTC m=+1561.815173552" 
watchObservedRunningTime="2025-12-09 15:24:23.611834047 +0000 UTC m=+1561.819283134" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.079284 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:24:30 crc kubenswrapper[4583]: E1209 15:24:30.079763 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.303595 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.303632 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.332957 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.657562 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:30 crc kubenswrapper[4583]: I1209 15:24:30.694541 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:32 crc kubenswrapper[4583]: I1209 15:24:32.639640 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vcbvr" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="registry-server" containerID="cri-o://aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9" gracePeriod=2 Dec 09 15:24:32 crc kubenswrapper[4583]: I1209 15:24:32.956906 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.066567 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities\") pod \"69f6c6db-4797-4dff-abfa-de0a90114d2e\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.066608 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9ggx\" (UniqueName: \"kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx\") pod \"69f6c6db-4797-4dff-abfa-de0a90114d2e\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.066790 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content\") pod \"69f6c6db-4797-4dff-abfa-de0a90114d2e\" (UID: \"69f6c6db-4797-4dff-abfa-de0a90114d2e\") " Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.067238 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities" (OuterVolumeSpecName: "utilities") pod "69f6c6db-4797-4dff-abfa-de0a90114d2e" (UID: "69f6c6db-4797-4dff-abfa-de0a90114d2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.070903 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx" (OuterVolumeSpecName: "kube-api-access-z9ggx") pod "69f6c6db-4797-4dff-abfa-de0a90114d2e" (UID: "69f6c6db-4797-4dff-abfa-de0a90114d2e"). InnerVolumeSpecName "kube-api-access-z9ggx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.079602 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69f6c6db-4797-4dff-abfa-de0a90114d2e" (UID: "69f6c6db-4797-4dff-abfa-de0a90114d2e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.168868 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.168898 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f6c6db-4797-4dff-abfa-de0a90114d2e-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.168907 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9ggx\" (UniqueName: \"kubernetes.io/projected/69f6c6db-4797-4dff-abfa-de0a90114d2e-kube-api-access-z9ggx\") on node \"crc\" DevicePath \"\"" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.646909 4583 generic.go:334] "Generic (PLEG): container finished" podID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerID="aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9" exitCode=0 Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.646950 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerDied","Data":"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9"} Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.646960 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcbvr" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.646975 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcbvr" event={"ID":"69f6c6db-4797-4dff-abfa-de0a90114d2e","Type":"ContainerDied","Data":"7eb648ebad5ea10cebaad8e7f91c06183c95db8b35c2cca8d9142674a02284e3"} Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.646991 4583 scope.go:117] "RemoveContainer" containerID="aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.660188 4583 scope.go:117] "RemoveContainer" containerID="4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.667230 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.671029 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcbvr"] Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.673028 4583 scope.go:117] "RemoveContainer" containerID="04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.690461 4583 scope.go:117] "RemoveContainer" containerID="aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9" Dec 09 15:24:33 crc kubenswrapper[4583]: E1209 15:24:33.690769 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9\": container with ID starting with aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9 not found: ID does not exist" containerID="aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.690798 4583 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9"} err="failed to get container status \"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9\": rpc error: code = NotFound desc = could not find container \"aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9\": container with ID starting with aaacd95299150ce963d89042678fa7ace3c9ca8fa543ca192d35a11f0e8860e9 not found: ID does not exist" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.690815 4583 scope.go:117] "RemoveContainer" containerID="4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6" Dec 09 15:24:33 crc kubenswrapper[4583]: E1209 15:24:33.691099 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6\": container with ID starting with 4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6 not found: ID does not exist" containerID="4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.691135 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6"} err="failed to get container status \"4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6\": rpc error: code = NotFound desc = could not find container \"4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6\": container with ID starting with 4a62b76cbe31a5fb4e9bdd56517160e939fd85b740eef86dd74965740fea07a6 not found: ID does not exist" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.691159 4583 scope.go:117] "RemoveContainer" containerID="04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f" Dec 09 15:24:33 crc kubenswrapper[4583]: E1209 15:24:33.691450 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f\": container with ID starting with 04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f not found: ID does not exist" containerID="04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f" Dec 09 15:24:33 crc kubenswrapper[4583]: I1209 15:24:33.691472 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f"} err="failed to get container status \"04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f\": rpc error: code = NotFound desc = could not find container \"04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f\": container with ID starting with 04b3bc2b03ff5cb8390d2cb3e7e0ceb9921f18d2c26c602ef9cf7587c391862f not found: ID does not exist" Dec 09 15:24:34 crc kubenswrapper[4583]: I1209 15:24:34.084425 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" path="/var/lib/kubelet/pods/69f6c6db-4797-4dff-abfa-de0a90114d2e/volumes" Dec 09 15:24:42 crc kubenswrapper[4583]: I1209 15:24:42.081435 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:24:42 crc kubenswrapper[4583]: E1209 15:24:42.082201 4583 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:24:53 crc kubenswrapper[4583]: I1209 15:24:53.078124 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:24:53 crc kubenswrapper[4583]: E1209 15:24:53.078622 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:25:07 crc kubenswrapper[4583]: I1209 15:25:07.078189 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:25:07 crc kubenswrapper[4583]: E1209 15:25:07.078718 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:25:18 crc kubenswrapper[4583]: I1209 15:25:18.078543 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:25:18 crc kubenswrapper[4583]: E1209 15:25:18.079816 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:25:27 crc kubenswrapper[4583]: E1209 15:25:27.084083 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:25:27 crc kubenswrapper[4583]: E1209 15:25:27.084439 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:25:27 crc kubenswrapper[4583]: E1209 15:25:27.084538 4583 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:25:27 crc kubenswrapper[4583]: E1209 15:25:27.085713 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:25:29 crc kubenswrapper[4583]: I1209 15:25:29.078009 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:25:29 crc kubenswrapper[4583]: E1209 15:25:29.078247 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:25:39 crc kubenswrapper[4583]: E1209 15:25:39.079922 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:25:44 crc kubenswrapper[4583]: I1209 15:25:44.078155 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:25:44 crc kubenswrapper[4583]: E1209 15:25:44.078600 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:25:53 crc kubenswrapper[4583]: E1209 15:25:53.079574 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:25:59 crc kubenswrapper[4583]: I1209 15:25:59.078148 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:25:59 crc kubenswrapper[4583]: E1209 15:25:59.078766 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:26:07 crc kubenswrapper[4583]: E1209 15:26:07.080053 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:26:12 crc kubenswrapper[4583]: I1209 15:26:12.081598 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:26:12 crc kubenswrapper[4583]: E1209 15:26:12.082144 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:26:22 crc kubenswrapper[4583]: E1209 15:26:22.090627 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:26:23 crc kubenswrapper[4583]: I1209 15:26:23.078697 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:26:23 crc kubenswrapper[4583]: E1209 15:26:23.078950 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:26:33 crc kubenswrapper[4583]: E1209 15:26:33.080032 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:26:37 crc kubenswrapper[4583]: I1209 15:26:37.078620 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:26:37 crc kubenswrapper[4583]: E1209 15:26:37.079084 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:26:44 crc kubenswrapper[4583]: E1209 15:26:44.080013 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:26:50 crc kubenswrapper[4583]: I1209 15:26:50.078362 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:26:50 crc kubenswrapper[4583]: E1209 15:26:50.079558 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" 
podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:26:57 crc kubenswrapper[4583]: E1209 15:26:57.080355 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:27:03 crc kubenswrapper[4583]: I1209 15:27:03.078810 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:27:03 crc kubenswrapper[4583]: E1209 15:27:03.079216 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:27:09 crc kubenswrapper[4583]: E1209 15:27:09.079518 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:27:14 crc kubenswrapper[4583]: I1209 15:27:14.078894 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:27:14 crc kubenswrapper[4583]: E1209 15:27:14.079106 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:27:24 crc kubenswrapper[4583]: E1209 15:27:24.080088 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:27:26 crc kubenswrapper[4583]: I1209 15:27:26.078533 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:27:26 crc kubenswrapper[4583]: E1209 15:27:26.078927 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:27:38 crc kubenswrapper[4583]: E1209 15:27:38.080254 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:27:41 crc kubenswrapper[4583]: I1209 15:27:41.078713 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:27:41 crc kubenswrapper[4583]: E1209 15:27:41.079290 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:27:49 crc kubenswrapper[4583]: E1209 15:27:49.080247 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:27:52 crc kubenswrapper[4583]: I1209 15:27:52.081355 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:27:52 crc kubenswrapper[4583]: E1209 15:27:52.081770 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jmtvl_openshift-machine-config-operator(7ce1d933-c482-444a-86c2-3e9f1315eb16)\"" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" Dec 09 15:28:04 crc kubenswrapper[4583]: E1209 15:28:04.080299 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.840863 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q8ghq/must-gather-txrz4"] Dec 09 15:28:05 crc kubenswrapper[4583]: E1209 15:28:05.841278 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="extract-utilities" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.841290 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="extract-utilities" Dec 09 15:28:05 crc kubenswrapper[4583]: E1209 15:28:05.841296 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="registry-server" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.841302 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="registry-server" Dec 09 15:28:05 crc kubenswrapper[4583]: E1209 
15:28:05.841321 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="extract-content" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.841328 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="extract-content" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.841450 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="69f6c6db-4797-4dff-abfa-de0a90114d2e" containerName="registry-server" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.842043 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.844133 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-q8ghq"/"default-dockercfg-dnjhp" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.844238 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q8ghq"/"kube-root-ca.crt" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.844607 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q8ghq"/"openshift-service-ca.crt" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.854311 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.854387 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzvz6\" (UniqueName: \"kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.855484 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q8ghq/must-gather-txrz4"] Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.955449 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.955509 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzvz6\" (UniqueName: \"kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.955889 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:05 crc kubenswrapper[4583]: I1209 15:28:05.975555 4583 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzvz6\" (UniqueName: \"kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6\") pod \"must-gather-txrz4\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.078770 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.155511 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.493403 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q8ghq/must-gather-txrz4"] Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.500170 4583 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.657740 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"5a7c52aeabced8f171b4e08eaf853c958ac8f440569632537e9b96a6d1620241"} Dec 09 15:28:06 crc kubenswrapper[4583]: I1209 15:28:06.658766 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q8ghq/must-gather-txrz4" event={"ID":"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9","Type":"ContainerStarted","Data":"668298ddcf0544289f29721209e04586e0ae54030a0a383c729b9c3834f1eec5"} Dec 09 15:28:13 crc kubenswrapper[4583]: I1209 15:28:13.693341 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q8ghq/must-gather-txrz4" event={"ID":"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9","Type":"ContainerStarted","Data":"681c480a279c9b41761366d3edc46937bc63af7757165a835be9a05cf084ec62"} Dec 09 15:28:13 crc kubenswrapper[4583]: I1209 15:28:13.693703 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q8ghq/must-gather-txrz4" event={"ID":"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9","Type":"ContainerStarted","Data":"315890ee797281243a5c533e5def2ad5598090a5a9ff994d598331eaa1f20155"} Dec 09 15:28:13 crc kubenswrapper[4583]: I1209 15:28:13.706954 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q8ghq/must-gather-txrz4" podStartSLOduration=2.10606924 podStartE2EDuration="8.706942309s" podCreationTimestamp="2025-12-09 15:28:05 +0000 UTC" firstStartedPulling="2025-12-09 15:28:06.498403929 +0000 UTC m=+1784.705853018" lastFinishedPulling="2025-12-09 15:28:13.099276999 +0000 UTC m=+1791.306726087" observedRunningTime="2025-12-09 15:28:13.702455535 +0000 UTC m=+1791.909904623" watchObservedRunningTime="2025-12-09 15:28:13.706942309 +0000 UTC m=+1791.914391397" Dec 09 15:28:21 crc kubenswrapper[4583]: I1209 15:28:21.953823 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:21 crc kubenswrapper[4583]: I1209 15:28:21.955054 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:21 crc kubenswrapper[4583]: I1209 15:28:21.964930 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.047290 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svm5f\" (UniqueName: \"kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.047537 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.047584 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.148774 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.148824 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.148945 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svm5f\" (UniqueName: \"kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.149227 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.149265 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.165614 4583 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-svm5f\" (UniqueName: \"kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f\") pod \"certified-operators-285nk\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.269369 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.691431 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:22 crc kubenswrapper[4583]: W1209 15:28:22.697496 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f70d903_dc75_4d59_97b1_a88eff7b308d.slice/crio-0436fce3e069d7941d2f9e9b64a0e0ad882d84f3f450bd424fc249f75d320b91 WatchSource:0}: Error finding container 0436fce3e069d7941d2f9e9b64a0e0ad882d84f3f450bd424fc249f75d320b91: Status 404 returned error can't find the container with id 0436fce3e069d7941d2f9e9b64a0e0ad882d84f3f450bd424fc249f75d320b91 Dec 09 15:28:22 crc kubenswrapper[4583]: I1209 15:28:22.738143 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerStarted","Data":"0436fce3e069d7941d2f9e9b64a0e0ad882d84f3f450bd424fc249f75d320b91"} Dec 09 15:28:23 crc kubenswrapper[4583]: I1209 15:28:23.744175 4583 generic.go:334] "Generic (PLEG): container finished" podID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerID="dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af" exitCode=0 Dec 09 15:28:23 crc kubenswrapper[4583]: I1209 15:28:23.744509 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerDied","Data":"dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af"} Dec 09 15:28:24 crc kubenswrapper[4583]: I1209 15:28:24.750826 4583 generic.go:334] "Generic (PLEG): container finished" podID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerID="d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06" exitCode=0 Dec 09 15:28:24 crc kubenswrapper[4583]: I1209 15:28:24.750921 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerDied","Data":"d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06"} Dec 09 15:28:25 crc kubenswrapper[4583]: I1209 15:28:25.758258 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerStarted","Data":"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2"} Dec 09 15:28:25 crc kubenswrapper[4583]: I1209 15:28:25.772810 4583 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-285nk" podStartSLOduration=3.323134545 podStartE2EDuration="4.772797914s" podCreationTimestamp="2025-12-09 15:28:21 +0000 UTC" firstStartedPulling="2025-12-09 15:28:23.746118594 +0000 UTC m=+1801.953567683" lastFinishedPulling="2025-12-09 15:28:25.195781963 +0000 UTC m=+1803.403231052" observedRunningTime="2025-12-09 15:28:25.770102118 +0000 UTC 
m=+1803.977551206" watchObservedRunningTime="2025-12-09 15:28:25.772797914 +0000 UTC m=+1803.980247002" Dec 09 15:28:32 crc kubenswrapper[4583]: I1209 15:28:32.270386 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:32 crc kubenswrapper[4583]: I1209 15:28:32.270757 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:32 crc kubenswrapper[4583]: I1209 15:28:32.301554 4583 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:32 crc kubenswrapper[4583]: I1209 15:28:32.818303 4583 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:32 crc kubenswrapper[4583]: I1209 15:28:32.854918 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:34 crc kubenswrapper[4583]: I1209 15:28:34.800732 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-285nk" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="registry-server" containerID="cri-o://6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2" gracePeriod=2 Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.106407 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.208944 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities\") pod \"1f70d903-dc75-4d59-97b1-a88eff7b308d\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.209013 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content\") pod \"1f70d903-dc75-4d59-97b1-a88eff7b308d\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.209200 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svm5f\" (UniqueName: \"kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f\") pod \"1f70d903-dc75-4d59-97b1-a88eff7b308d\" (UID: \"1f70d903-dc75-4d59-97b1-a88eff7b308d\") " Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.209649 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities" (OuterVolumeSpecName: "utilities") pod "1f70d903-dc75-4d59-97b1-a88eff7b308d" (UID: "1f70d903-dc75-4d59-97b1-a88eff7b308d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.213400 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f" (OuterVolumeSpecName: "kube-api-access-svm5f") pod "1f70d903-dc75-4d59-97b1-a88eff7b308d" (UID: "1f70d903-dc75-4d59-97b1-a88eff7b308d"). InnerVolumeSpecName "kube-api-access-svm5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.244129 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f70d903-dc75-4d59-97b1-a88eff7b308d" (UID: "1f70d903-dc75-4d59-97b1-a88eff7b308d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.311598 4583 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-utilities\") on node \"crc\" DevicePath \"\"" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.311730 4583 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f70d903-dc75-4d59-97b1-a88eff7b308d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.311827 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svm5f\" (UniqueName: \"kubernetes.io/projected/1f70d903-dc75-4d59-97b1-a88eff7b308d-kube-api-access-svm5f\") on node \"crc\" DevicePath \"\"" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.807343 4583 generic.go:334] "Generic (PLEG): container finished" podID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerID="6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2" exitCode=0 Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.807379 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerDied","Data":"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2"} Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.807403 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-285nk" event={"ID":"1f70d903-dc75-4d59-97b1-a88eff7b308d","Type":"ContainerDied","Data":"0436fce3e069d7941d2f9e9b64a0e0ad882d84f3f450bd424fc249f75d320b91"} Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.807418 4583 scope.go:117] "RemoveContainer" containerID="6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.807512 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-285nk" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.825008 4583 scope.go:117] "RemoveContainer" containerID="d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.829538 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.833213 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-285nk"] Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.839127 4583 scope.go:117] "RemoveContainer" containerID="dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.868893 4583 scope.go:117] "RemoveContainer" containerID="6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2" Dec 09 15:28:35 crc kubenswrapper[4583]: E1209 15:28:35.869194 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2\": container with ID starting with 6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2 not found: ID does not exist" containerID="6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.869222 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2"} err="failed to get container status \"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2\": rpc error: code = NotFound desc = could not find container \"6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2\": container with ID starting with 6b049b39fdcd13bf37a8a83cc148396882048cc24424e627f1a7bb7ed814e1f2 not found: ID does not exist" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.869241 4583 scope.go:117] "RemoveContainer" containerID="d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06" Dec 09 15:28:35 crc kubenswrapper[4583]: E1209 15:28:35.869508 4583 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06\": container with ID starting with d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06 not found: ID does not exist" containerID="d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.869528 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06"} err="failed to get container status \"d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06\": rpc error: code = NotFound desc = could not find container \"d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06\": container with ID starting with d601e45464721e9586705d1677767273400133f8c2353de3c58329cacdb15c06 not found: ID does not exist" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.869541 4583 scope.go:117] "RemoveContainer" containerID="dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af" Dec 09 15:28:35 crc kubenswrapper[4583]: E1209 15:28:35.869797 4583 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af\": container with ID starting with dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af not found: ID does not exist" containerID="dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af" Dec 09 15:28:35 crc kubenswrapper[4583]: I1209 15:28:35.869831 4583 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af"} err="failed to get container status \"dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af\": rpc error: code = NotFound desc = could not find container \"dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af\": container with ID starting with dc69d795aff365f9775318f86ad2e1418decdc1d24b2edd1f58bb6adac3bd0af not found: ID does not exist" Dec 09 15:28:36 crc kubenswrapper[4583]: I1209 15:28:36.084597 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" path="/var/lib/kubelet/pods/1f70d903-dc75-4d59-97b1-a88eff7b308d/volumes" Dec 09 15:28:38 crc kubenswrapper[4583]: I1209 15:28:38.773877 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-j8gp4_5e34d8df-b4af-4781-a1c9-10de3c2a8717/control-plane-machine-set-operator/0.log" Dec 09 15:28:38 crc kubenswrapper[4583]: I1209 15:28:38.895589 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wjg5r_40b62828-d0ca-4668-b621-9b963a53be7d/machine-api-operator/0.log" Dec 09 15:28:38 crc kubenswrapper[4583]: I1209 15:28:38.907244 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wjg5r_40b62828-d0ca-4668-b621-9b963a53be7d/kube-rbac-proxy/0.log" Dec 09 15:28:46 crc kubenswrapper[4583]: I1209 15:28:46.467244 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-6fm4x_56190539-a710-4685-8aed-321aa76e1b89/cert-manager-controller/0.log" Dec 09 15:28:46 crc kubenswrapper[4583]: I1209 15:28:46.567696 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-dwsfd_f777d618-579e-4266-891d-694f44f53647/cert-manager-cainjector/0.log" Dec 09 15:28:46 crc kubenswrapper[4583]: I1209 15:28:46.617244 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-7j9s4_cb6cb5b2-1b81-4961-a765-2da559fc1e69/cert-manager-webhook/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.407873 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-tjcbp_457b330f-11f8-4f32-a2df-86568457b8bd/nmstate-console-plugin/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.490576 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ngtg2_94f96eaa-6548-4dbf-bcb6-50c416335322/nmstate-handler/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.542083 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-stxfm_ae1b2371-96e9-4b20-b9e5-df312ff49a42/kube-rbac-proxy/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.551277 4583 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-stxfm_ae1b2371-96e9-4b20-b9e5-df312ff49a42/nmstate-metrics/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.657621 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-hkrd7_dffc95f7-da8f-4b23-b2b5-0b30e9233a08/nmstate-operator/0.log" Dec 09 15:28:54 crc kubenswrapper[4583]: I1209 15:28:54.683928 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-m4rct_e20829fc-8615-47ac-8a52-00bdad76e975/nmstate-webhook/0.log" Dec 09 15:29:02 crc kubenswrapper[4583]: I1209 15:29:02.050285 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fb468c877-9bdpc_b5bc8251-67d0-47c6-837a-b10f8214e1ca/kube-rbac-proxy/0.log" Dec 09 15:29:02 crc kubenswrapper[4583]: I1209 15:29:02.080224 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fb468c877-9bdpc_b5bc8251-67d0-47c6-837a-b10f8214e1ca/manager/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.197643 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-ff9846bd-xz6wb_bc4e3345-4a60-4071-a644-d51029c38bd2/cluster-logging-operator/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.288144 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-bmpbq_e622300e-10ba-43dc-92aa-53181f0be346/collector/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.335962 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_272c488c-ff6e-4650-9e82-8d5d45913fb1/loki-compactor/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.423428 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-76cc67bf56-t8lbn_092b4cea-9783-446e-885d-894810464116/loki-distributor/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.452239 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-7ff5bd49dc-bf46s_249a9969-3fdd-497d-bdda-3bea0481a4dd/opa/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.452400 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-7ff5bd49dc-bf46s_249a9969-3fdd-497d-bdda-3bea0481a4dd/gateway/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.553242 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-7ff5bd49dc-xpkxx_b0233b96-19b0-4b07-b935-ca38b3707646/gateway/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.576531 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-7ff5bd49dc-xpkxx_b0233b96-19b0-4b07-b935-ca38b3707646/opa/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.657232 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_b1709e50-5632-47cd-93bf-2de3b7e53b06/loki-index-gateway/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.713903 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_cc853021-5a5e-4f66-ab41-d70e667fa5f5/loki-ingester/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.798109 4583 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-querier-5895d59bb8-pvl24_3a141193-2b55-4706-af21-6a17b6f53ff1/loki-querier/0.log" Dec 09 15:29:10 crc kubenswrapper[4583]: I1209 15:29:10.838505 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-84558f7c9f-drm5l_429ef0ec-d210-48ef-98c2-0ba02186f05d/loki-query-frontend/0.log" Dec 09 15:29:18 crc kubenswrapper[4583]: I1209 15:29:18.933150 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-97nqc_d6e8ec80-20a1-4023-aa5d-3a29154e66c3/kube-rbac-proxy/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.051944 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-97nqc_d6e8ec80-20a1-4023-aa5d-3a29154e66c3/controller/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.085922 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-frr-files/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.195310 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-frr-files/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.219732 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-metrics/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.222018 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-reloader/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.244882 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-reloader/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.359139 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-metrics/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.365171 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-frr-files/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.374074 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-metrics/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.380945 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-reloader/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.510441 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-reloader/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.512938 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-metrics/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.524387 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/cp-frr-files/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.532670 4583 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/controller/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.669278 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/kube-rbac-proxy-frr/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.707588 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/kube-rbac-proxy/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.715645 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/frr/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.727958 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/frr-metrics/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.909866 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2zn75_55c9372b-521a-4b5f-adea-817d98f90de4/reloader/0.log" Dec 09 15:29:19 crc kubenswrapper[4583]: I1209 15:29:19.913014 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-68j5w_d7363ae4-f775-4300-b836-aeb7c384dcee/frr-k8s-webhook-server/0.log" Dec 09 15:29:20 crc kubenswrapper[4583]: I1209 15:29:20.040950 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f7f465b94-n2qjr_d23da855-0d1f-4866-8281-1fa9a175a1bd/manager/0.log" Dec 09 15:29:20 crc kubenswrapper[4583]: I1209 15:29:20.077439 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-696488d7c9-kfwfp_6cef6965-8116-435e-86e1-76c965e8c319/webhook-server/0.log" Dec 09 15:29:20 crc kubenswrapper[4583]: I1209 15:29:20.170182 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-58smd_e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f/kube-rbac-proxy/0.log" Dec 09 15:29:20 crc kubenswrapper[4583]: I1209 15:29:20.292987 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-58smd_e00bf7b0-93a9-4e3c-a8e8-0d57fa90e99f/speaker/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.289063 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/util/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.417258 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/pull/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.421505 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/pull/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.430070 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/util/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.536226 4583 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/extract/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.540619 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/pull/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.543488 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8mns9b_56bb49ee-b985-4d14-9124-c9143393b8fb/util/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.666897 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/util/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.852593 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/pull/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.863847 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/pull/0.log" Dec 09 15:29:28 crc kubenswrapper[4583]: I1209 15:29:28.866705 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.004374 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.007893 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/extract/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.018722 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212flgpzh_665e4cac-0f3c-48ee-a078-92ced9aaae38/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.121893 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.257308 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.284726 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.300265 4583 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.421866 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/extract/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.421908 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.425310 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fsvvgx_adf5e854-c962-4889-8737-6d5197f9653a/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.533302 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.673346 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.679399 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.689849 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.819130 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/util/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.823323 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/pull/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.825456 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83z4g48_3ff4a7f6-d61d-4fae-9806-b831561f37a5/extract/0.log" Dec 09 15:29:29 crc kubenswrapper[4583]: I1209 15:29:29.933873 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.095322 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.095497 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.096324 
4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.234424 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.240441 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.385421 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.504330 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kjb9f_e305bf53-d531-4bbf-a6a7-38ad3a3c7189/registry-server/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.563352 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.589449 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.607863 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.724859 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-utilities/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.746873 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/extract-content/0.log" Dec 09 15:29:30 crc kubenswrapper[4583]: I1209 15:29:30.925802 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d7mjv_51e4c62b-29d0-4662-916c-5c9bfbd33598/marketplace-operator/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.020061 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2crjx_cf53a407-5aab-44dc-a635-a8e2f68443cf/registry-server/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.034235 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.145726 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.149035 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.177513 4583 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.282914 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.286887 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.357148 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wzxcc_cbdedc1b-dd30-46cf-b517-3c3f626560a3/registry-server/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.383426 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.517149 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.531117 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.537375 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.651384 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-utilities/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.653522 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/extract-content/0.log" Dec 09 15:29:31 crc kubenswrapper[4583]: I1209 15:29:31.912912 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c46x5_2dfe0b67-0d5e-406e-bbd3-de46a80d2a8c/registry-server/0.log" Dec 09 15:29:47 crc kubenswrapper[4583]: I1209 15:29:47.428234 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fb468c877-9bdpc_b5bc8251-67d0-47c6-837a-b10f8214e1ca/kube-rbac-proxy/0.log" Dec 09 15:29:47 crc kubenswrapper[4583]: I1209 15:29:47.602099 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fb468c877-9bdpc_b5bc8251-67d0-47c6-837a-b10f8214e1ca/manager/0.log" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.161638 4583 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6"] Dec 09 15:30:00 crc kubenswrapper[4583]: E1209 15:30:00.162159 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="extract-utilities" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.162184 4583 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="extract-utilities" Dec 09 15:30:00 crc kubenswrapper[4583]: E1209 15:30:00.162196 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="extract-content" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.162202 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="extract-content" Dec 09 15:30:00 crc kubenswrapper[4583]: E1209 15:30:00.162215 4583 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="registry-server" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.162221 4583 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="registry-server" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.162306 4583 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f70d903-dc75-4d59-97b1-a88eff7b308d" containerName="registry-server" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.162705 4583 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.164295 4583 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.164524 4583 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.170056 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6"] Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.253536 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.253585 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.253604 4583 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twvxf\" (UniqueName: \"kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.354942 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: 
\"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.354997 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.355015 4583 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twvxf\" (UniqueName: \"kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.355656 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.359028 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.368313 4583 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twvxf\" (UniqueName: \"kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf\") pod \"collect-profiles-29421570-2hxb6\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.478753 4583 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:00 crc kubenswrapper[4583]: I1209 15:30:00.820049 4583 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6"] Dec 09 15:30:00 crc kubenswrapper[4583]: W1209 15:30:00.827552 4583 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9effaf2b_10c0_415e_a62e_5c86ebfc46d5.slice/crio-ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8 WatchSource:0}: Error finding container ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8: Status 404 returned error can't find the container with id ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8 Dec 09 15:30:01 crc kubenswrapper[4583]: I1209 15:30:01.198896 4583 generic.go:334] "Generic (PLEG): container finished" podID="9effaf2b-10c0-415e-a62e-5c86ebfc46d5" containerID="dc5b7ec1ab3a81f7a19145eaf41fb9f78047513bb58f5a383f81e123edecbee4" exitCode=0 Dec 09 15:30:01 crc kubenswrapper[4583]: I1209 15:30:01.199070 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" event={"ID":"9effaf2b-10c0-415e-a62e-5c86ebfc46d5","Type":"ContainerDied","Data":"dc5b7ec1ab3a81f7a19145eaf41fb9f78047513bb58f5a383f81e123edecbee4"} Dec 09 15:30:01 crc kubenswrapper[4583]: I1209 15:30:01.199092 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" event={"ID":"9effaf2b-10c0-415e-a62e-5c86ebfc46d5","Type":"ContainerStarted","Data":"ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8"} Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.422315 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.580843 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume\") pod \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.580887 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twvxf\" (UniqueName: \"kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf\") pod \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.580976 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume\") pod \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\" (UID: \"9effaf2b-10c0-415e-a62e-5c86ebfc46d5\") " Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.581425 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume" (OuterVolumeSpecName: "config-volume") pod "9effaf2b-10c0-415e-a62e-5c86ebfc46d5" (UID: "9effaf2b-10c0-415e-a62e-5c86ebfc46d5"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.584904 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9effaf2b-10c0-415e-a62e-5c86ebfc46d5" (UID: "9effaf2b-10c0-415e-a62e-5c86ebfc46d5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.589898 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf" (OuterVolumeSpecName: "kube-api-access-twvxf") pod "9effaf2b-10c0-415e-a62e-5c86ebfc46d5" (UID: "9effaf2b-10c0-415e-a62e-5c86ebfc46d5"). InnerVolumeSpecName "kube-api-access-twvxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.682359 4583 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-config-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.682386 4583 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 09 15:30:02 crc kubenswrapper[4583]: I1209 15:30:02.682396 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twvxf\" (UniqueName: \"kubernetes.io/projected/9effaf2b-10c0-415e-a62e-5c86ebfc46d5-kube-api-access-twvxf\") on node \"crc\" DevicePath \"\"" Dec 09 15:30:03 crc kubenswrapper[4583]: I1209 15:30:03.209250 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" event={"ID":"9effaf2b-10c0-415e-a62e-5c86ebfc46d5","Type":"ContainerDied","Data":"ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8"} Dec 09 15:30:03 crc kubenswrapper[4583]: I1209 15:30:03.209463 4583 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab0b859e1d7af48758f5ea04955766d6f4e8cc6c7b6aa0deaf148fe36064cdd8" Dec 09 15:30:03 crc kubenswrapper[4583]: I1209 15:30:03.209289 4583 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29421570-2hxb6" Dec 09 15:30:16 crc kubenswrapper[4583]: E1209 15:30:16.083379 4583 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:30:16 crc kubenswrapper[4583]: E1209 15:30:16.083792 4583 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" image="38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05" Dec 09 15:30:16 crc kubenswrapper[4583]: E1209 15:30:16.083883 4583 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66tvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4vftt_openstack-operators(9726924c-7078-4a99-adbe-54e2070ef9da): ErrImagePull: rpc error: code = DeadlineExceeded desc = 
initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \"http://38.102.83.5:5001/v2/\": dial tcp 38.102.83.5:5001: i/o timeout" logger="UnhandledError" Dec 09 15:30:16 crc kubenswrapper[4583]: E1209 15:30:16.085038 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05: pinging container registry 38.102.83.5:5001: Get \\\"http://38.102.83.5:5001/v2/\\\": dial tcp 38.102.83.5:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:30:27 crc kubenswrapper[4583]: E1209 15:30:27.081265 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:30:28 crc kubenswrapper[4583]: I1209 15:30:28.331567 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" containerID="315890ee797281243a5c533e5def2ad5598090a5a9ff994d598331eaa1f20155" exitCode=0 Dec 09 15:30:28 crc kubenswrapper[4583]: I1209 15:30:28.331607 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q8ghq/must-gather-txrz4" event={"ID":"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9","Type":"ContainerDied","Data":"315890ee797281243a5c533e5def2ad5598090a5a9ff994d598331eaa1f20155"} Dec 09 15:30:28 crc kubenswrapper[4583]: I1209 15:30:28.332100 4583 scope.go:117] "RemoveContainer" containerID="315890ee797281243a5c533e5def2ad5598090a5a9ff994d598331eaa1f20155" Dec 09 15:30:28 crc kubenswrapper[4583]: I1209 15:30:28.536587 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q8ghq_must-gather-txrz4_7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9/gather/0.log" Dec 09 15:30:29 crc kubenswrapper[4583]: I1209 15:30:29.391714 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:30:29 crc kubenswrapper[4583]: I1209 15:30:29.392722 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.244852 4583 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q8ghq/must-gather-txrz4"] Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.245497 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-q8ghq/must-gather-txrz4" podUID="7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" containerName="copy" 
containerID="cri-o://681c480a279c9b41761366d3edc46937bc63af7757165a835be9a05cf084ec62" gracePeriod=2 Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.248516 4583 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q8ghq/must-gather-txrz4"] Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.366363 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q8ghq_must-gather-txrz4_7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9/copy/0.log" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.366998 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" containerID="681c480a279c9b41761366d3edc46937bc63af7757165a835be9a05cf084ec62" exitCode=143 Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.567649 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q8ghq_must-gather-txrz4_7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9/copy/0.log" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.567935 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.599527 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output\") pod \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.599614 4583 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzvz6\" (UniqueName: \"kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6\") pod \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\" (UID: \"7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9\") " Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.604584 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6" (OuterVolumeSpecName: "kube-api-access-rzvz6") pod "7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" (UID: "7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9"). InnerVolumeSpecName "kube-api-access-rzvz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.650671 4583 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" (UID: "7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.701818 4583 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzvz6\" (UniqueName: \"kubernetes.io/projected/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-kube-api-access-rzvz6\") on node \"crc\" DevicePath \"\"" Dec 09 15:30:35 crc kubenswrapper[4583]: I1209 15:30:35.701853 4583 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 09 15:30:36 crc kubenswrapper[4583]: I1209 15:30:36.084533 4583 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9" path="/var/lib/kubelet/pods/7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9/volumes" Dec 09 15:30:36 crc kubenswrapper[4583]: I1209 15:30:36.374299 4583 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q8ghq_must-gather-txrz4_7ccbc827-ac7d-444d-a9ae-b6f1a507e6c9/copy/0.log" Dec 09 15:30:36 crc kubenswrapper[4583]: I1209 15:30:36.374621 4583 scope.go:117] "RemoveContainer" containerID="681c480a279c9b41761366d3edc46937bc63af7757165a835be9a05cf084ec62" Dec 09 15:30:36 crc kubenswrapper[4583]: I1209 15:30:36.374716 4583 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q8ghq/must-gather-txrz4" Dec 09 15:30:36 crc kubenswrapper[4583]: I1209 15:30:36.390843 4583 scope.go:117] "RemoveContainer" containerID="315890ee797281243a5c533e5def2ad5598090a5a9ff994d598331eaa1f20155" Dec 09 15:30:41 crc kubenswrapper[4583]: E1209 15:30:41.080134 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:30:56 crc kubenswrapper[4583]: E1209 15:30:56.079716 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:30:59 crc kubenswrapper[4583]: I1209 15:30:59.391476 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:30:59 crc kubenswrapper[4583]: I1209 15:30:59.391721 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:31:08 crc kubenswrapper[4583]: E1209 15:31:08.079725 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:31:21 crc kubenswrapper[4583]: E1209 15:31:21.079834 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.391619 4583 patch_prober.go:28] interesting pod/machine-config-daemon-jmtvl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.392002 4583 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.392037 4583 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.392535 4583 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a7c52aeabced8f171b4e08eaf853c958ac8f440569632537e9b96a6d1620241"} pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.392583 4583 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" podUID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerName="machine-config-daemon" containerID="cri-o://5a7c52aeabced8f171b4e08eaf853c958ac8f440569632537e9b96a6d1620241" gracePeriod=600 Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.613057 4583 generic.go:334] "Generic (PLEG): container finished" podID="7ce1d933-c482-444a-86c2-3e9f1315eb16" containerID="5a7c52aeabced8f171b4e08eaf853c958ac8f440569632537e9b96a6d1620241" exitCode=0 Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.613128 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerDied","Data":"5a7c52aeabced8f171b4e08eaf853c958ac8f440569632537e9b96a6d1620241"} Dec 09 15:31:29 crc kubenswrapper[4583]: I1209 15:31:29.613254 4583 scope.go:117] "RemoveContainer" containerID="3fb012c0f9f753a7ba2eb8d40ed5028e9dbed625541d03c5d3bab15eae8baa26" Dec 09 15:31:30 crc kubenswrapper[4583]: I1209 15:31:30.619023 4583 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jmtvl" 
event={"ID":"7ce1d933-c482-444a-86c2-3e9f1315eb16","Type":"ContainerStarted","Data":"7867d70f178fdc5e45d82241cd700f5dc0fc0b6386fde71047d518fc902b3d87"} Dec 09 15:31:34 crc kubenswrapper[4583]: E1209 15:31:34.080543 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:31:49 crc kubenswrapper[4583]: E1209 15:31:49.080109 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:32:02 crc kubenswrapper[4583]: E1209 15:32:02.082080 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:32:14 crc kubenswrapper[4583]: E1209 15:32:14.080094 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:32:26 crc kubenswrapper[4583]: E1209 15:32:26.080478 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:32:38 crc kubenswrapper[4583]: E1209 15:32:38.079981 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:32:51 crc kubenswrapper[4583]: E1209 15:32:51.079395 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:33:02 crc kubenswrapper[4583]: E1209 15:33:02.082252 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" Dec 09 15:33:14 crc kubenswrapper[4583]: E1209 15:33:14.079469 4583 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.5:5001/openstack-k8s-operators/openstack-operator-index:b7b08af6c20d940e7a79480f938b88859bf69d05\\\"\"" pod="openstack-operators/openstack-operator-index-4vftt" podUID="9726924c-7078-4a99-adbe-54e2070ef9da" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116040501024436 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116040501017353 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015116034202016477 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015116034202015447 5ustar corecore